Diffstat (limited to 'debian/pyrex/pyrex-0.9.9/Pyrex/Compiler')
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Builtin.py        276
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/CmdLine.py         94
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Code.py           546
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/DebugFlags.py       4
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Errors.py          77
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ExprNodes.py     3954
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Filenames.py        9
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.pickle    bin 0 -> 19254 bytes
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.py        145
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Main.py           564
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ModuleNode.py    1678
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Naming.py          69
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Nodes.py         3249
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Options.py          5
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Parsing.py       2142
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/PyrexTypes.py     974
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Scanning.py       390
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Symtab.py        1342
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/TypeSlots.py      629
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Version.py          1
-rw-r--r--  debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/__init__.py         0
21 files changed, 16148 insertions, 0 deletions
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Builtin.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Builtin.py
new file mode 100644
index 00000000..62dbfbef
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Builtin.py
@@ -0,0 +1,276 @@
+#
+# Pyrex - Builtin Definitions
+#
+
+from Symtab import BuiltinScope
+from TypeSlots import Signature
+from PyrexTypes import py_type_type, c_size_t_type, c_py_ssize_t_type
+
+builtin_constant_table = [
+ # name, type/ctype, C API name
+ ("buffer", "t", "(&PyBuffer_Type)"),
+ ("enumerate", "t", "(&PyEnum_Type)"),
+ ("file", "t", "(&PyFile_Type)"),
+ ("float", "t", "(&PyFloat_Type)"),
+ ("int", "t", "(&PyInt_Type)"),
+ ("long", "t", "(&PyLong_Type)"),
+ ("open", "t", "(&PyFile_Type)"),
+ ("property", "t", "(&PyProperty_Type)"),
+ ("str", "t", "(&PyString_Type)"),
+ ("tuple", "t", "(&PyTuple_Type)"),
+ ("xrange", "t", "(&PyRange_Type)"),
+
+ ("True", "O", "Py_True"),
+ ("False", "O", "Py_False"),
+ ("Ellipsis", "O", "Py_Ellipsis"),
+
+ ("Exception", "t/O", "PyExc_Exception"),
+ ("StopIteration", "t/O", "PyExc_StopIteration"),
+ ("StandardError", "t/O", "PyExc_StandardError"),
+ ("ArithmeticError", "t/O", "PyExc_ArithmeticError"),
+ ("LookupError", "t/O", "PyExc_LookupError"),
+
+ ("AssertionError", "t/O", "PyExc_AssertionError"),
+ ("EOFError", "t/O", "PyExc_EOFError"),
+ ("FloatingPointError", "t/O", "PyExc_FloatingPointError"),
+ ("EnvironmentError", "t/O", "PyExc_EnvironmentError"),
+ ("IOError", "t/O", "PyExc_IOError"),
+ ("OSError", "t/O", "PyExc_OSError"),
+ ("ImportError", "t/O", "PyExc_ImportError"),
+ ("IndexError", "t/O", "PyExc_IndexError"),
+ ("KeyError", "t/O", "PyExc_KeyError"),
+ ("KeyboardInterrupt", "t/O", "PyExc_KeyboardInterrupt"),
+ ("MemoryError", "t/O", "PyExc_MemoryError"),
+ ("NameError", "t/O", "PyExc_NameError"),
+ ("OverflowError", "t/O", "PyExc_OverflowError"),
+ ("RuntimeError", "t/O", "PyExc_RuntimeError"),
+ ("NotImplementedError", "t/O", "PyExc_NotImplementedError"),
+ ("SyntaxError", "t/O", "PyExc_SyntaxError"),
+ ("IndentationError", "t/O", "PyExc_IndentationError"),
+ ("TabError", "t/O", "PyExc_TabError"),
+ ("ReferenceError", "t/O", "PyExc_ReferenceError"),
+ ("SystemError", "t/O", "PyExc_SystemError"),
+ ("SystemExit", "t/O", "PyExc_SystemExit"),
+ ("TypeError", "t/O", "PyExc_TypeError"),
+ ("UnboundLocalError", "t/O", "PyExc_UnboundLocalError"),
+ ("UnicodeError", "t/O", "PyExc_UnicodeError"),
+ ("UnicodeEncodeError", "t/O", "PyExc_UnicodeEncodeError"),
+ ("UnicodeDecodeError", "t/O", "PyExc_UnicodeDecodeError"),
+ ("UnicodeTranslateError", "t/O", "PyExc_UnicodeTranslateError"),
+ ("ValueError", "t/O", "PyExc_ValueError"),
+ ("ZeroDivisionError", "t/O", "PyExc_ZeroDivisionError"),
+ # Not including these by default because they are platform-specific
+ #("WindowsError", "t/O", "PyExc_WindowsError"),
+ #("VMSError", "t/O", "PyExc_VMSError"),
+
+ ("MemoryErrorInst", "t/O", "PyExc_MemoryErrorInst"),
+
+ ("Warning", "t/O", "PyExc_Warning"),
+ ("UserWarning", "t/O", "PyExc_UserWarning"),
+ ("DeprecationWarning", "t/O", "PyExc_DeprecationWarning"),
+ ("PendingDeprecationWarning", "t/O", "PyExc_PendingDeprecationWarning"),
+ ("SyntaxWarning", "t/O", "PyExc_SyntaxWarning"),
+ ("OverflowWarning", "t/O", "PyExc_OverflowWarning"),
+ ("RuntimeWarning", "t/O", "PyExc_RuntimeWarning"),
+ ("FutureWarning", "t/O", "PyExc_FutureWarning"),
+
+]
+
+builtin_function_table = [
+ # name, args, return, C API func, py equiv = "*"
+ ('abs', "O", "O", "PyNumber_Absolute"),
+ ('bool', "O", "i", "PyObject_IsTrue"),
+ #('chr', "", "", ""),
+ #('cmp', "", "", "", ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
+ #('compile', "", "", ""), # PyObject* Py_CompileString( char *str, char *filename, int start)
+ ('delattr', "OO", "r", "PyObject_DelAttr"),
+ ('dir', "O", "O", "PyObject_Dir"),
+ ('divmod', "OO", "O", "PyNumber_Divmod"),
+ #('eval', "", "", ""),
+ #('execfile', "", "", ""),
+ #('filter', "", "", ""),
+ ('getattr', "OO", "O", "PyObject_GetAttr"),
+ ('getattr3', "OOO", "O", "__Pyx_GetAttr3", "getattr"),
+ ('hasattr', "OO", "i", "PyObject_HasAttr"),
+ ('hash', "O", "l", "PyObject_Hash"),
+ #('hex', "", "", ""),
+ #('id', "", "", ""),
+ #('input', "", "", ""),
+    ('cintern', "s", "O", "PyString_InternFromString"), # different name because it doesn't handle null bytes
+ ('isinstance', "OO", "i", "PyObject_IsInstance"),
+ ('issubclass', "OO", "i", "PyObject_IsSubclass"),
+ ('iter', "O", "O", "PyObject_GetIter"),
+ ('iter2', "OO", "O", "PyCallIter_New"),
+ ('len', "O", "Z", "PyObject_Length"),
+ #('map', "", "", ""),
+ #('max', "", "", ""),
+ #('min', "", "", ""),
+ #('oct', "", "", ""),
+ # Not worth doing open, when second argument would become mandatory
+ #('open', "ss", "O", "PyFile_FromString"),
+ #('ord', "", "", ""),
+ ('pow', "OOO", "O", "PyNumber_Power"),
+ #('range', "", "", ""),
+ #('raw_input', "", "", ""),
+ #('reduce', "", "", ""),
+ ('reload', "O", "O", "PyImport_ReloadModule"),
+ ('repr', "O", "O", "PyObject_Repr"),
+ #('round', "", "", ""),
+ ('setattr', "OOO", "r", "PyObject_SetAttr"),
+ #('sum', "", "", ""),
+ #('unichr', "", "", ""),
+ #('unicode', "", "", ""),
+ #('vars', "", "", ""),
+ #('zip', "", "", ""),
+ ('typecheck', "Ot", "b", "PyObject_TypeCheck", False),
+ ('issubtype', "tt", "b", "PyType_IsSubtype", False),
+]
+
+dict_methods = [
+ # name, args, return, C API func
+ ("clear", "O", "v", "PyDict_Clear"),
+ ("copy", "O", "O", "PyDict_Copy"),
+ ("items", "O", "O", "PyDict_Items"),
+ ("keys", "O", "O", "PyDict_Keys"),
+ ("values", "O", "O", "PyDict_Values"),
+ ("merge", "OOi", "r", "PyDict_Merge"),
+ ("update", "OO", "r", "PyDict_Update"),
+ ("merge_pairs", "OOi", "r", "PyDict_MergeFromSeq2"),
+]
+
+list_methods = [
+ # name, args, return, C API func
+ ("insert", "OiO", "r", "PyList_Insert"),
+ ("append", "OO", "r", "PyList_Append"),
+ ("iappend", "OO", "i", "PyList_Append"),
+ ("sort", "O", "r", "PyList_Sort"),
+ ("reverse", "O", "r", "PyList_Reverse"),
+ ("as_tuple", "O", "O", "PyList_AsTuple"),
+]
+
+slice_methods = [
+ # name, args, return, C API func
+ ("indices", "O", "O", "PySlice_Indices"),
+]
+
+slice_members = [
+ # name, type
+ ("start", "O"),
+ ("stop", "O"),
+ ("step", "O"),
+]
+
+builtin_c_type_table = [
+ ("size_t", c_size_t_type),
+ ("Py_ssize_t", c_py_ssize_t_type),
+]
+
+builtin_type_table = [
+ # name, objstruct, typeobj, methods, members, flags
+# bool - function
+# buffer - constant
+# classmethod
+ ("dict", "PyDictObject", "PyDict_Type", dict_methods),
+# enumerate - constant
+# file - constant
+# float - constant
+# int - constant
+ ("list", "PyListObject", "PyList_Type", list_methods, [], ['is_sequence']),
+# long - constant
+# object
+# property - constant
+ ("slice", "PySliceObject", "PySlice_Type", slice_methods, slice_members),
+# staticmethod
+# super
+# str - constant
+# tuple - constant
+ ("type", "PyTypeObject", "PyType_Type", []),
+# xrange - constant
+]
+
+getattr3_utility_code = ["""
+static PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/
+""","""
+static PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
+ PyObject *r = PyObject_GetAttr(o, n);
+ if (!r) {
+ if (!PyErr_ExceptionMatches(PyExc_AttributeError))
+ goto bad;
+ PyErr_Clear();
+ r = d;
+ Py_INCREF(d);
+ }
+ return r;
+bad:
+ return 0;
+}
+"""]
+
+builtin_utility_code = {
+ 'getattr3': getattr3_utility_code,
+}
+
+builtin_scope = BuiltinScope()
+
+def type_and_ctype(typecode, c_typecode = None):
+ type = Signature.format_map[typecode]
+ if c_typecode:
+ ctype = Signature.format_map[c_typecode]
+ else:
+ ctype = None
+ return type, ctype
+
+def declare_builtin_constant(name, typecode, cname):
+ type, ctype = type_and_ctype(*typecode.split("/"))
+ builtin_scope.declare_builtin_constant(name, type, cname, ctype)
+
+def declare_builtin_func(name, args, ret, cname, py_equiv = "*"):
+ sig = Signature(args, ret)
+ type = sig.function_type()
+ utility = builtin_utility_code.get(name)
+ builtin_scope.declare_builtin_cfunction(name, type, cname, py_equiv, utility)
+
+def declare_builtin_method(self_type, name, args, ret, cname):
+ sig = Signature(args, ret)
+ meth_type = sig.function_type(self_type)
+ self_type.scope.declare_builtin_method(name, meth_type, cname)
+
+def declare_builtin_member(self_type, name, typecode, cname = None):
+ member_type = Signature.format_map[typecode]
+ self_type.scope.declare_builtin_var(name, member_type, cname)
+
+def declare_builtin_c_type(name, type):
+ builtin_scope.declare_builtin_c_type(name, type)
+
+def declare_builtin_type(name, objstruct, typeobj, methods, members = [],
+ flags = []):
+ entry = builtin_scope.declare_builtin_class(name, objstruct, typeobj)
+ type = entry.type
+ for desc in methods:
+ declare_builtin_method(type, *desc)
+ for desc in members:
+ declare_builtin_member(type, *desc)
+ for flag in flags:
+ setattr(type, flag, 1)
+
+def init_builtin_constants():
+ for desc in builtin_constant_table:
+ declare_builtin_constant(*desc)
+
+def init_builtin_funcs():
+ for desc in builtin_function_table:
+ declare_builtin_func(*desc)
+
+def init_builtin_types():
+ for desc in builtin_c_type_table:
+ declare_builtin_c_type(*desc)
+ for desc in builtin_type_table:
+ declare_builtin_type(*desc)
+ py_type_type.define(builtin_scope.find_type("type"))
+
+def init_builtins():
+ init_builtin_constants()
+ init_builtin_funcs()
+ init_builtin_types()
+
+init_builtins()
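
The tables above are purely declarative: init_builtins() walks them, and the
declare_* helpers translate the one-letter codes through Signature.format_map
into C argument and return types before declaring each name in the builtin
scope. A minimal sketch of registering one more C-level builtin through the
same helper (the 'callable' entry is hypothetical and not part of this patch;
it assumes a Python 2 interpreter with this Pyrex package importable):

    # Hypothetical example: expose callable() as a builtin C function.
    # "O" = one Python object argument, "i" = C int return value,
    # PyCallable_Check is the CPython C API call it would map onto.
    from Pyrex.Compiler import Builtin

    Builtin.declare_builtin_func('callable', "O", "i", "PyCallable_Check")
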
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/CmdLine.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/CmdLine.py
new file mode 100644
index 00000000..bf546d6e
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/CmdLine.py
@@ -0,0 +1,94 @@
+#
+# Pyrex - Command Line Parsing
+#
+
+import sys
+from Filenames import pyx_suffixes
+from Pyrex.Utils import has_suffix
+
+usage = """\
+Usage: pyrexc [options] sourcefile...
+Options:
+ -v, --version Display version number of pyrex compiler
+ -l, --create-listing Write error messages to a listing file
+ -I, --include-dir <directory> Search for include files in named directory
+ -o, --output-file <filename> Specify name of generated C file
+ -r, --recursive Recursively find and compile dependencies
+ -t, --timestamps Only compile newer source files (implied with -r)
+ -f, --force Compile all source files (overrides implied -t)
+ -q, --quiet Don't print module names in recursive mode
+The following experimental options are supported only on MacOSX:
+ -C, --compile Compile generated .c file to .o file
+ -X, --link Link .o file to produce extension module (implies -C)
+ -+, --cplus Use C++ compiler for compiling and linking
+ Additional .o files to link may be supplied when using -X."""
+
+def bad_usage():
+ print >>sys.stderr, usage
+ sys.exit(1)
+
+def parse_command_line(args):
+ from Pyrex.Compiler.Main import \
+ CompilationOptions, default_options
+
+ def pop_arg():
+ if args:
+ return args.pop(0)
+ else:
+ bad_usage()
+
+ def get_param(option):
+ tail = option[2:]
+ if tail:
+ return tail
+ else:
+ return pop_arg()
+
+ options = CompilationOptions(default_options)
+ sources = []
+ while args:
+ if args[0].startswith("-"):
+ option = pop_arg()
+ if option in ("-v", "--version"):
+ options.show_version = 1
+ elif option in ("-l", "--create-listing"):
+ options.use_listing_file = 1
+ elif option in ("-C", "--compile"):
+ options.c_only = 0
+ elif option in ("-X", "--link"):
+ options.c_only = 0
+ options.obj_only = 0
+ elif option in ("-+", "--cplus"):
+ options.cplus = 1
+ elif option.startswith("-I"):
+ options.include_path.append(get_param(option))
+ elif option == "--include-dir":
+ options.include_path.append(pop_arg())
+ elif option in ("-o", "--output-file"):
+ options.output_file = pop_arg()
+ elif option in ("-r", "--recursive"):
+ options.recursive = 1
+ elif option in ("-t", "--timestamps"):
+ options.timestamps = 1
+ elif option in ("-f", "--force"):
+ options.timestamps = 0
+ else:
+ bad_usage()
+ else:
+ arg = pop_arg()
+ if has_suffix(arg, pyx_suffixes):
+ sources.append(arg)
+ elif arg.endswith(".o"):
+ options.objects.append(arg)
+ else:
+ print >>sys.stderr, \
+ "pyrexc: %s: Unknown filename suffix" % arg
+ if options.objects and len(sources) > 1:
+ print >>sys.stderr, \
+ "pyrexc: Only one source file allowed together with .o files"
+ if options.use_listing_file and len(sources) > 1:
+ print >>sys.stderr, \
+ "pyrexc: Only one source file allowed when using -o"
+ sys.exit(1)
+ return options, sources
+
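
A minimal usage sketch for this parser (illustrative only; assumes a Python 2
interpreter with Pyrex on sys.path):

    import sys
    from Pyrex.Compiler.CmdLine import parse_command_line

    # Returns the CompilationOptions object plus the .pyx sources it found;
    # anything ending in .o is collected into options.objects instead.
    options, sources = parse_command_line(sys.argv[1:])
    print sources, options.include_path
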
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Code.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Code.py
new file mode 100644
index 00000000..a47692bb
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Code.py
@@ -0,0 +1,546 @@
+##########################################################################
+#
+# Pyrex - Code output module
+#
+##########################################################################
+
+import os, re
+import Naming
+from Pyrex.Utils import open_new_file
+from PyrexTypes import py_object_type, c_char_array_type, typecast
+
+identifier_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*$")
+max_intern_length = 30
+
+class CCodeWriter:
+ # f file output file
+ # level int indentation level
+ # bol bool beginning of line?
+ # marker string comment to emit before next line
+
+ def __init__(self, f):
+ #self.f = open_new_file(outfile_name)
+ self.f = f
+ self.level = 0
+ self.bol = 1
+ self.marker = None
+
+ def putln(self, code = ""):
+ if self.marker and self.bol:
+ self.emit_marker()
+ if code:
+ self.put(code)
+ self.f.write("\n");
+ self.bol = 1
+
+ def emit_marker(self):
+ self.f.write("\n");
+ self.indent()
+ self.f.write("/* %s */\n" % self.marker)
+ self.marker = None
+
+ def put(self, code):
+ dl = code.count("{") - code.count("}")
+ if dl < 0:
+ self.level += dl
+ if self.bol:
+ self.indent()
+ self.f.write(code)
+ self.bol = 0
+ if dl > 0:
+ self.level += dl
+
+ def increase_indent(self):
+ self.level = self.level + 1
+
+ def decrease_indent(self):
+ self.level = self.level - 1
+
+ def begin_block(self):
+ self.putln("{")
+ self.increase_indent()
+
+ def end_block(self):
+ self.decrease_indent()
+ self.putln("}")
+
+ def indent(self):
+ self.f.write(" " * self.level)
+
+ def mark_pos(self, pos):
+ file, line, col = pos
+ self.marker = '"%s":%s' % (file, line)
+
+ def put_var_declarations(self, entries, static = 0, dll_linkage = None,
+ definition = True):
+ for entry in entries:
+ if not entry.in_cinclude:
+ self.put_var_declaration(entry, static, dll_linkage, definition)
+
+ def put_var_declaration(self, entry, static = 0, dll_linkage = None,
+ definition = True):
+ #print "Code.put_var_declaration:", entry.name, repr(entry.type) ###
+ visibility = entry.visibility
+ if visibility == 'private' and not definition:
+ #print "...private and not definition, skipping" ###
+ return
+ if not entry.used and visibility == "private":
+ #print "not used and private, skipping" ###
+ return
+ storage_class = ""
+ if visibility == 'extern':
+ storage_class = Naming.extern_c_macro
+ elif visibility == 'public':
+ if not definition:
+ storage_class = Naming.extern_c_macro
+ elif visibility == 'private':
+ if static:
+ storage_class = "static"
+ if storage_class:
+ self.put("%s " % storage_class)
+ if visibility <> 'public':
+ dll_linkage = None
+ self.put(entry.type.declaration_code(entry.cname,
+ dll_linkage = dll_linkage))
+ if entry.init is not None:
+ self.put(" = %s" % entry.type.literal_code(entry.init))
+ self.putln(";")
+
+ def entry_as_pyobject(self, entry):
+ type = entry.type
+ if (not entry.is_self_arg and not entry.type.is_complete()) \
+ or (entry.type.is_extension_type and entry.type.base_type):
+ return "(PyObject *)" + entry.cname
+ else:
+ return entry.cname
+
+ def as_pyobject(self, cname, type):
+ if type:
+ return typecast(py_object_type, type, cname)
+ else:
+ return cname
+
+ def put_incref(self, cname, type = None):
+ self.putln("Py_INCREF(%s);" % self.as_pyobject(cname, type))
+
+ def put_decref(self, cname, type = None):
+ self.putln("Py_DECREF(%s);" % self.as_pyobject(cname, type))
+
+ def put_var_incref(self, entry):
+ if entry.type.is_pyobject:
+ self.putln("Py_INCREF(%s);" % self.entry_as_pyobject(entry))
+
+ def put_decref_clear(self, cname, type = None):
+ self.putln("Py_DECREF(%s); %s = 0;" % (
+ self.as_pyobject(cname, type), cname)) # What was wrong with this?
+ #typecast(py_object_type, type, cname), cname))
+
+ def put_xdecref(self, cname, type):
+ self.putln("Py_XDECREF(%s);" % self.as_pyobject(cname, type))
+
+ def put_xdecref_clear(self, cname, type):
+ self.putln("Py_XDECREF(%s); %s = 0;" % (
+ self.as_pyobject(cname, type), cname))
+
+ def put_var_decref(self, entry):
+ if entry.type.is_pyobject:
+ self.putln("Py_DECREF(%s);" % self.entry_as_pyobject(entry))
+
+ def put_var_decref_clear(self, entry):
+ if entry.type.is_pyobject:
+ self.putln("Py_DECREF(%s); %s = 0;" % (
+ self.entry_as_pyobject(entry), entry.cname))
+
+ def put_var_xdecref(self, entry):
+ if entry.type.is_pyobject:
+ self.putln("Py_XDECREF(%s);" % self.entry_as_pyobject(entry))
+
+ def put_var_xdecref_clear(self, entry):
+ if entry.type.is_pyobject:
+ self.putln("Py_XDECREF(%s); %s = 0;" % (
+ self.entry_as_pyobject(entry), entry.cname))
+
+ def put_var_decrefs(self, entries, used_only = 0):
+ for entry in entries:
+ if not used_only or entry.used:
+ if entry.xdecref_cleanup:
+ self.put_var_xdecref(entry)
+ else:
+ self.put_var_decref(entry)
+
+ def put_var_xdecrefs(self, entries):
+ for entry in entries:
+ self.put_var_xdecref(entry)
+
+ def put_var_xdecrefs_clear(self, entries):
+ for entry in entries:
+ self.put_var_xdecref_clear(entry)
+
+ def put_init_to_py_none(self, cname, type):
+ py_none = typecast(type, py_object_type, "Py_None")
+ self.putln("%s = %s; Py_INCREF(Py_None);" % (cname, py_none))
+
+ def put_init_var_to_py_none(self, entry, template = "%s"):
+ code = template % entry.cname
+ self.put_init_to_py_none(code, entry.type)
+
+ def put_pymethoddef(self, entry, term):
+ if entry.doc:
+ doc_code = entry.doc_cname
+ else:
+ doc_code = 0
+ self.putln(
+ '{"%s", (PyCFunction)%s, METH_VARARGS|METH_KEYWORDS, %s}%s' % (
+ entry.name,
+ entry.func_cname,
+ doc_code,
+ term))
+
+ def put_h_guard(self, guard):
+ self.putln("#ifndef %s" % guard)
+ self.putln("#define %s" % guard)
+
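
CCodeWriter derives indentation from the braces that pass through put() and
putln(): a line with an unmatched "{" raises the level after it is written,
and an unmatched "}" lowers it before. A small sketch (illustrative only;
Python 2, with this package importable):

    from StringIO import StringIO
    from Pyrex.Compiler.Code import CCodeWriter

    buf = StringIO()
    code = CCodeWriter(buf)
    code.putln("static int add(int a, int b) {")  # level goes up after this line
    code.putln("return a + b;")                   # written one level deeper
    code.putln("}")                               # level drops back before writing
    print buf.getvalue()
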
+#--------------------------------------------------------------------------
+
+class MainCCodeWriter(CCodeWriter):
+ # Code writer for executable C code.
+ #
+ # global_state GlobalCodeState module-wide state
+ # return_label string function return point label
+ # error_label string error catch point label
+ # continue_label string loop continue point label
+ # break_label string loop break point label
+ # label_counter integer counter for naming labels
+ # in_try_finally boolean inside try of try...finally
+ # exc_vars (string * 3) exception vars for reraise, or None
+
+ in_try_finally = 0
+
+ def __init__(self, f, base = None):
+ CCodeWriter.__init__(self, f)
+ if base:
+ self.global_state = base.global_state
+ else:
+ self.global_state = GlobalCodeState()
+ self.label_counter = 1
+ self.error_label = None
+ self.exc_vars = None
+
+ def init_labels(self):
+ self.label_counter = 0
+ self.labels_used = {}
+ self.return_label = self.new_label()
+ self.new_error_label()
+ self.continue_label = None
+ self.break_label = None
+
+ def new_label(self):
+ n = self.label_counter
+ self.label_counter = n + 1
+ return "%s%d" % (Naming.label_prefix, n)
+
+ def new_error_label(self):
+ old_err_lbl = self.error_label
+ self.error_label = self.new_label()
+ return old_err_lbl
+
+ def get_loop_labels(self):
+ return (
+ self.continue_label,
+ self.break_label)
+
+ def set_loop_labels(self, labels):
+ (self.continue_label,
+ self.break_label) = labels
+
+ def new_loop_labels(self):
+ old_labels = self.get_loop_labels()
+ self.set_loop_labels(
+ (self.new_label(),
+ self.new_label()))
+ return old_labels
+
+ def get_all_labels(self):
+ return (
+ self.continue_label,
+ self.break_label,
+ self.return_label,
+ self.error_label)
+
+ def set_all_labels(self, labels):
+ (self.continue_label,
+ self.break_label,
+ self.return_label,
+ self.error_label) = labels
+
+ def all_new_labels(self):
+ old_labels = self.get_all_labels()
+ new_labels = []
+ for old_label in old_labels:
+ if old_label:
+ new_labels.append(self.new_label())
+ else:
+ new_labels.append(old_label)
+ self.set_all_labels(new_labels)
+ return old_labels
+
+ def use_label(self, lbl):
+ self.labels_used[lbl] = 1
+
+ def put_label(self, lbl):
+ if lbl in self.labels_used:
+ self.putln("%s:;" % lbl)
+
+ def put_goto(self, lbl):
+ self.use_label(lbl)
+ self.putln("goto %s;" % lbl)
+
+ def error_goto(self, pos):
+ lbl = self.error_label
+ self.use_label(lbl)
+ return "{%s; goto %s;}" % (
+ self.error_setup(pos),
+ lbl)
+
+ def error_setup(self, pos):
+ return "%s = %s[%s]; %s = %s" % (
+ Naming.filename_cname,
+ Naming.filetable_cname,
+ self.lookup_filename(pos[0]),
+ Naming.lineno_cname,
+ pos[1])
+
+ def lookup_filename(self, filename):
+ return self.global_state.lookup_filename(filename)
+
+ def use_utility_code(self, uc):
+ self.global_state.use_utility_code(uc)
+
+ def get_string_const(self, text):
+ # Get C name for a string constant, adding a new one
+ # if necessary.
+ return self.global_state.get_string_const(text).cname
+
+ def new_const(self, type):
+ # Get C name for a new precalculated value.
+ return self.global_state.new_const(type).cname
+
+ def get_py_string_const(self, text):
+ # Get C name for a Python string constant, adding a new one
+ # if necessary. If the string is name-like, it will be interned.
+ return self.global_state.get_py_string_const(text).cname
+
+ def intern(self, name):
+ return self.get_py_string_const(name)
+
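
Labels are emitted lazily: put_goto() and use_label() record a label in
labels_used, and put_label() writes nothing for a label no jump ever
referenced. A sketch (illustrative only; Python 2):

    from StringIO import StringIO
    from Pyrex.Compiler.Code import MainCCodeWriter

    w = MainCCodeWriter(StringIO())
    w.init_labels()
    w.put_goto(w.error_label)    # marks the error label as used
    w.put_label(w.return_label)  # never referenced, so nothing is written
    w.put_label(w.error_label)   # emitted as "<label>:;"
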
+#--------------------------------------------------------------------------
+
+class StringConst:
+ # Info held by GlobalCodeState about a string constant.
+ #
+ # cname string
+ # text string
+ # py_const Const Corresponding Python string
+
+ py_const = None
+
+ def __init__(self, cname, text):
+ self.cname = cname
+ self.text = text
+
+#--------------------------------------------------------------------------
+
+class Const:
+ # Info held by GlobalCodeState about a precalculated value.
+ #
+ # cname string
+ # type PyrexType
+ # intern boolean for Python strings
+
+ intern = 0
+
+ def __init__(self, cname, type):
+ self.cname = cname
+ self.type = type
+
+#--------------------------------------------------------------------------
+
+class GlobalCodeState:
+ # State pertaining to code generation for a whole module.
+ #
+ # filename_table {string : int} for finding filename table indexes
+ # filename_list [string] filenames in filename table order
+ # utility_code {int : int} id to utility_list index
+ # utility_list list utility code used
+ # const_counter int for generating const names
+ # string_index {string : String} string constant index
+ # string_consts [StringConst] all string constants
+ # other_consts [Const] other precalculated values
+
+ def __init__(self):
+ self.filename_table = {}
+ self.filename_list = []
+ self.utility_code = {}
+ self.utility_list = []
+ self.const_counter = 1
+ self.string_index = {}
+ self.string_consts = []
+ self.other_consts = []
+
+ def lookup_filename(self, filename):
+ try:
+ index = self.filename_table[filename]
+ except KeyError:
+ index = len(self.filename_list)
+ self.filename_list.append(filename)
+ self.filename_table[filename] = index
+ return index
+
+ def generate_filename_table(self, code):
+ code.putln("")
+ code.putln("static char *%s[] = {" % Naming.filenames_cname)
+ if self.filename_list:
+ for filename in self.filename_list:
+ filename = os.path.basename(filename)
+ escaped_filename = filename.replace("\\", "\\\\").replace('"', r'\"')
+ code.putln('"%s",' %
+ escaped_filename)
+ else:
+ # Some C compilers don't like an empty array
+ code.putln("0")
+ code.putln("};")
+
+ def use_utility_code(self, uc):
+ i = id(uc)
+ if i not in self.utility_code:
+ self.utility_code[i] = len(self.utility_list)
+ self.utility_list.append(uc)
+
+ def generate_utility_functions(self, code):
+ code.putln("")
+ code.putln("/* Runtime support code */")
+ code.putln("")
+ code.putln("static void %s(void) {" % Naming.fileinit_cname)
+ code.putln("%s = %s;" %
+ (Naming.filetable_cname, Naming.filenames_cname))
+ code.putln("}")
+ for utility_code in self.utility_list:
+ code.h.put(utility_code[0])
+ code.put(utility_code[1])
+
+ def new_const_name(self):
+ # Create a new globally-unique name for a constant.
+ name = "%s%s" % (Naming.const_prefix, self.const_counter)
+ self.const_counter += 1
+ return name
+
+ def new_string_const(self, text):
+ # Add a new C string constant.
+ c = StringConst(self.new_const_name(), text)
+ self.string_consts.append(c)
+ self.string_index[text] = c
+ return c
+
+ def new_const(self, type, cname = None):
+ if not cname:
+ cname = self.new_const_name()
+ c = Const(cname, type)
+ self.other_consts.append(c)
+ return c
+
+ def new_py_const(self, cname = None, intern = 0):
+ # Add a new Python constant.
+ c = self.new_const(py_object_type, cname)
+ if intern:
+ c.intern = 1
+ return c
+
+ def get_string_const(self, text):
+ # Get a C string constant, adding a new one if necessary.
+ c = self.string_index.get(text)
+ if not c:
+ c = self.new_string_const(text)
+ return c
+
+ def get_py_string_const(self, text):
+ # Get a Python string constant, adding a new one if necessary.
+ # If the string is name-like, it will be interned.
+ s = self.get_string_const(text)
+ if not s.py_const:
+ intern = len(text) <= max_intern_length and identifier_pattern.match(text)
+ if intern:
+ cname = Naming.interned_prefix + text
+ else:
+ cname = s.cname + "p"
+ s.py_const = self.new_py_const(cname, intern)
+ return s.py_const
+
+ def generate_const_declarations(self, code):
+ self.generate_string_const_declarations(code)
+ self.generate_other_const_declarations(code)
+ self.generate_stringtab(code)
+
+ def generate_string_const_declarations(self, code):
+ code.putln("")
+ for c in self.string_consts:
+ code.putln('static char %s[] = "%s";' % (c.cname, c.text))
+
+ def generate_other_const_declarations(self, code):
+ interned = []
+ uninterned = []
+ for c in self.other_consts:
+ if c.intern:
+ interned.append(c)
+ else:
+ uninterned.append(c)
+ interned.sort(lambda c1, c2: cmp(c1.cname, c2.cname))
+ def put_consts(consts):
+ code.putln("")
+ for c in consts:
+ decl = c.type.declaration_code(c.cname)
+ code.putln("static %s;" % decl)
+ put_consts(interned)
+ put_consts(uninterned)
+
+ def generate_stringtab(self, code):
+ interned = []
+ uninterned = []
+ for s in self.string_consts:
+ p = s.py_const
+ if p:
+ if p.intern:
+ interned.append(s)
+ else:
+ uninterned.append(s)
+ interned.sort(lambda c1, c2: cmp(c1.py_const.cname, c2.py_const.cname))
+ def put_stringtab(consts, intern):
+ for c in consts:
+ cname = c.cname
+ code.putln("{&%s, %d, %s, sizeof(%s)}," % (
+ c.py_const.cname, intern, cname, cname))
+ code.putln("")
+ code.putln("static __Pyx_StringTabEntry %s[] = {" % Naming.stringtab_cname)
+ put_stringtab(interned, 1)
+ put_stringtab(uninterned, 0)
+ code.putln("{0, 0, 0, 0}")
+ code.putln("};")
+
+#--------------------------------------------------------------------------
+
+class PyrexCodeWriter:
+ # f file output file
+ # level int indentation level
+
+ def __init__(self, outfile_name):
+ self.f = open_new_file(outfile_name)
+ self.level = 0
+
+ def putln(self, code):
+ self.f.write("%s%s\n" % (" " * self.level, code))
+
+ def indent(self):
+ self.level += 1
+
+ def dedent(self):
+ self.level -= 1
+
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/DebugFlags.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/DebugFlags.py
new file mode 100644
index 00000000..e36e0bd2
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/DebugFlags.py
@@ -0,0 +1,4 @@
+debug_disposal_code = 0
+debug_temp_alloc = 0
+debug_coercion = 0
+
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Errors.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Errors.py
new file mode 100644
index 00000000..1eef3a33
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Errors.py
@@ -0,0 +1,77 @@
+#
+# Pyrex - Errors
+#
+
+import sys
+from Pyrex.Utils import open_new_file
+
+warnings_issued = {}
+
+class PyrexError(EnvironmentError):
+ pass
+
+
+class CompileError(PyrexError):
+
+ def __init__(self, position = None, message = ""):
+ self.position = position
+ if position:
+ pos_str = "%s:%d:%d: " % position
+ else:
+ pos_str = ""
+ PyrexError.__init__(self, pos_str + message)
+
+
+class InternalError(Exception):
+ # If this is ever raised, there is a bug in the compiler.
+
+ def __init__(self, message):
+ Exception.__init__(self, "Internal compiler error: %s"
+ % message)
+
+
+listing_file = None
+num_errors = 0
+echo_file = None
+
+def open_listing_file(path, echo_to_stderr = 1):
+ # Begin a new error listing. If path is None, no file
+ # is opened, the error counter is just reset.
+ global listing_file, num_errors, echo_file
+ if path is not None:
+ listing_file = open_new_file(path)
+ else:
+ listing_file = None
+ if echo_to_stderr:
+ echo_file = sys.stderr
+ else:
+ echo_file = None
+ num_errors = 0
+
+def close_listing_file():
+ global listing_file
+ if listing_file:
+ listing_file.close()
+ listing_file = None
+
+def report(position, message):
+ err = CompileError(position, message)
+ line = "%s\n" % err
+ if listing_file:
+ listing_file.write(line)
+ if echo_file:
+ echo_file.write(line)
+ return err
+
+def warning(position, message):
+ return report(position, "Warning: %s" % message)
+
+def one_time_warning(position, key, message):
+ if key not in warnings_issued:
+ warnings_issued[key] = 1
+ warning(position, message)
+
+def error(position, message):
+ global num_errors
+ num_errors = num_errors + 1
+ return report(position, message)
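
A usage sketch for this module (illustrative only; Python 2). Positions are
(filename, line, column) tuples, matching the formatting in CompileError above:

    from Pyrex.Compiler import Errors

    Errors.open_listing_file(None)   # no listing file; echo to stderr, reset count
    Errors.error(("example.pyx", 3, 1), "undeclared name 'spam'")
    print Errors.num_errors          # -> 1
    Errors.close_listing_file()
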
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ExprNodes.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ExprNodes.py
new file mode 100644
index 00000000..c2848286
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ExprNodes.py
@@ -0,0 +1,3954 @@
+#
+# Pyrex - Parse tree nodes for expressions
+#
+
+import operator
+from string import join
+
+from Errors import error, InternalError
+import Naming
+from Nodes import Node
+import PyrexTypes
+from PyrexTypes import py_object_type, c_long_type, typecast, error_type, \
+ CPtrType, CFuncType, COverloadedFuncType
+import Symtab
+import Options
+
+from Pyrex.Debugging import print_call_chain
+from DebugFlags import debug_disposal_code, debug_temp_alloc, \
+ debug_coercion
+
+class ExprNode(Node):
+ # subexprs [string] Class var holding names of subexpr node attrs
+ # type PyrexType Type of the result
+ # result_code string Code fragment
+ # result_ctype string C type of result_code if different from type
+ # inplace_result string Temp var holding in-place operation result
+ # is_temp boolean Result is in a temporary variable
+ # is_sequence_constructor
+ # boolean Is a list or tuple constructor expression
+ # saved_subexpr_nodes
+ # [ExprNode or [ExprNode or None] or None]
+ # Cached result of subexpr_nodes()
+
+ result_ctype = None
+
+ # The Analyse Expressions phase for expressions is split
+ # into two sub-phases:
+ #
+ # Analyse Types
+ # Determines the result type of the expression based
+ # on the types of its sub-expressions, and inserts
+ # coercion nodes into the expression tree where needed.
+ # Marks nodes which will need to have temporary variables
+ # allocated.
+ #
+ # Allocate Temps
+ # Allocates temporary variables where needed, and fills
+ # in the result_code field of each node.
+ #
+ # ExprNode provides some convenience routines which
+ # perform both of the above phases. These should only
+ # be called from statement nodes, and only when no
+ # coercion nodes need to be added around the expression
+ # being analysed. If coercion is needed, the above two phases
+ # should be invoked separately.
+ #
+ # Framework code in ExprNode provides much of the common
+ # processing for the various phases. It makes use of the
+ # 'subexprs' class attribute of ExprNodes, which should
+ # contain a list of the names of attributes which can
+ # hold sub-nodes or sequences of sub-nodes.
+ #
+ # The framework makes use of a number of abstract methods.
+ # Their responsibilities are as follows.
+ #
+ # Declaration Analysis phase
+ #
+ # analyse_target_declaration
+ # Called during the Analyse Declarations phase to analyse
+ # the LHS of an assignment or argument of a del statement.
+ # Nodes which cannot be the LHS of an assignment need not
+ # implement it.
+ #
+ # Expression Analysis phase
+ #
+ # analyse_types
+ # - Call analyse_types on all sub-expressions.
+ # - Check operand types, and wrap coercion nodes around
+ # sub-expressions where needed.
+ # - Set the type of this node.
+ # - If a temporary variable will be required for the
+ # result, set the is_temp flag of this node.
+ #
+ # analyse_target_types
+ # Called during the Analyse Types phase to analyse
+ # the LHS of an assignment or argument of a del
+ # statement. Similar responsibilities to analyse_types.
+ #
+ # allocate_temps
+ # - Call allocate_temps for all sub-nodes.
+ # - Call allocate_temp for this node.
+ # - If a temporary was allocated, call release_temp on
+ # all sub-expressions.
+ #
+ # allocate_target_temps
+ # - Call allocate_temps on sub-nodes and allocate any other
+ # temps used during assignment.
+ # - Fill in result_code with a C lvalue if needed.
+ # - If a rhs node is supplied, call release_temp on it.
+ # - Call release_temp on sub-nodes and release any other
+ # temps used during assignment.
+ #
+ # #calculate_result_code
+ # # - Called during the Allocate Temps phase. Should return a
+ # # C code fragment evaluating to the result. This is only
+ # # called when the result is not a temporary.
+ #
+ # target_code
+ # Called by the default implementation of allocate_target_temps.
+ # Should return a C lvalue for assigning to the node. The default
+ # implementation calls calculate_result_code.
+ #
+ # check_const
+ # - Check that this node and its subnodes form a
+ # legal constant expression. If so, do nothing,
+ # otherwise call not_const.
+ #
+ # The default implementation of check_const
+ # assumes that the expression is not constant.
+ #
+ # check_const_addr
+ # - Same as check_const, except check that the
+ # expression is a C lvalue whose address is
+ # constant. Otherwise, call addr_not_const.
+ #
+    #        The default implementation of check_const_addr
+ # assumes that the expression is not a constant
+ # lvalue.
+ #
+ # Code Generation phase
+ #
+ # generate_evaluation_code
+ # 1. Call generate_evaluation_code for sub-expressions.
+ # 2. Generate any C statements necessary to calculate
+ # the result of this node from the results of its
+ # sub-expressions. If result is not in a temporary, record
+ # any information that will be needed by this node's
+ # implementation of calculate_result_code().
+    #        3. If result is in a temporary, call generate_disposal_code
+ # on all sub-expressions.
+ #
+ # A default implementation of generate_evaluation_code
+    # is provided which uses the following abstract methods:
+ # generate_result_code (for no. 2)
+ #
+ # generate_assignment_code
+ # Called on the LHS of an assignment.
+ # - Call generate_evaluation_code for sub-expressions.
+ # - Generate code to perform the assignment.
+ # - If the assignment absorbed a reference, call
+ # generate_post_assignment_code on the RHS,
+ # otherwise call generate_disposal_code on it.
+ #
+ # generate_deletion_code
+ # Called on an argument of a del statement.
+ # - Call generate_evaluation_code for sub-expressions.
+ # - Generate code to perform the deletion.
+ # - Call generate_disposal_code on all sub-expressions.
+ #
+ # calculate_result_code
+ # Return a C code fragment representing the result of this node.
+ # This is only called if the result is not in a temporary.
+ #
+
+ is_sequence_constructor = 0
+ is_attribute = 0
+
+ saved_subexpr_nodes = None
+ is_temp = 0
+
+ def not_implemented(self, method_name):
+ print_call_chain(method_name, "not implemented") ###
+ raise InternalError(
+ "%s.%s not implemented" %
+ (self.__class__.__name__, method_name))
+
+ def is_lvalue(self):
+ return 0
+
+ def is_inplace_lvalue(self):
+ return 0
+
+ def is_ephemeral(self):
+ # An ephemeral node is one whose result is in
+ # a Python temporary and we suspect there are no
+ # other references to it. Certain operations are
+ # disallowed on such values, since they are
+ # likely to result in a dangling pointer.
+ return self.type.is_pyobject and self.is_temp
+
+ def subexpr_nodes(self):
+ # Extract a list of subexpression nodes based
+ # on the contents of the subexprs class attribute.
+ if self.saved_subexpr_nodes is None:
+ nodes = []
+ for name in self.subexprs:
+ item = getattr(self, name)
+ if item:
+ if isinstance(item, ExprNode):
+ nodes.append(item)
+ else:
+ nodes.extend(item)
+ self.saved_subexpr_nodes = nodes
+ return self.saved_subexpr_nodes
+
+ def result(self):
+ # Return a C code fragment for the result of this node.
+ if self.is_temp:
+ result_code = self.result_code
+ else:
+ result_code = self.calculate_result_code()
+ return result_code
+
+ def result_as(self, type = None):
+ # Return the result code cast to the specified C type.
+ return typecast(type, self.ctype(), self.result())
+
+ def py_result(self):
+ # Return the result code cast to PyObject *.
+ return self.result_as(py_object_type)
+
+ def ctype(self):
+ # Return the native C type of the result.
+ return self.result_ctype or self.type
+
+ def compile_time_value(self, denv):
+ # Return value of compile-time expression, or report error.
+ error(self.pos, "Invalid compile-time expression")
+
+ def compile_time_value_error(self, e):
+ error(self.pos, "Error in compile-time expression: %s: %s" % (
+ e.__class__.__name__, e))
+
+ # ------------- Declaration Analysis ----------------
+
+ def analyse_target_declaration(self, env):
+ error(self.pos, "Cannot assign to or delete this")
+
+ # ------------- Expression Analysis ----------------
+
+ def analyse_const_expression(self, env):
+ # Called during the analyse_declarations phase of a
+ # constant expression. Analyses the expression's type,
+ # checks whether it is a legal const expression,
+ # and determines its value.
+ self.analyse_types(env)
+ self.allocate_temps(env)
+ self.check_const()
+
+ def analyse_expressions(self, env):
+ # Convenience routine performing both the Type
+ # Analysis and Temp Allocation phases for a whole
+ # expression.
+ self.analyse_types(env)
+ self.allocate_temps(env)
+
+ def analyse_target_expression(self, env, rhs):
+ # Convenience routine performing both the Type
+ # Analysis and Temp Allocation phases for the LHS of
+ # an assignment.
+ self.analyse_target_types(env)
+ self.allocate_target_temps(env, rhs)
+
+ def analyse_boolean_expression(self, env):
+ # Analyse expression and coerce to a boolean.
+ self.analyse_types(env)
+ bool = self.coerce_to_boolean(env)
+ bool.allocate_temps(env)
+ return bool
+
+ def analyse_temp_boolean_expression(self, env):
+ # Analyse boolean expression and coerce result into
+ # a temporary. This is used when a branch is to be
+ # performed on the result and we won't have an
+ # opportunity to ensure disposal code is executed
+ # afterwards. By forcing the result into a temporary,
+ # we ensure that all disposal has been done by the
+ # time we get the result.
+ self.analyse_types(env)
+ bool = self.coerce_to_boolean(env)
+ temp_bool = bool.coerce_to_temp(env)
+ temp_bool.allocate_temps(env)
+ return temp_bool
+
+ # --------------- Type Analysis ------------------
+
+ def analyse_as_function(self, env):
+ # Analyse types for an expression that is to be called.
+ self.analyse_types(env)
+
+ def analyse_as_module(self, env):
+ # If this node can be interpreted as a reference to a
+ # cimported module, return its scope, else None.
+ return None
+
+ def analyse_as_extension_type(self, env):
+ # If this node can be interpreted as a reference to an
+ # extension type, return its type, else None.
+ return None
+
+ def analyse_as_cimported_attribute(self, env, *args, **kwds):
+ # If this node can be interpreted as a cimported name,
+ # finish type analysis and return true, else return false.
+ return 0
+
+ def analyse_types(self, env):
+ self.not_implemented("analyse_types")
+
+ def analyse_target_types(self, env):
+ self.analyse_types(env)
+
+ def analyse_inplace_types(self, env):
+ if self.is_inplace_lvalue():
+ self.analyse_types(env)
+ else:
+ error(self.pos, "Invalid target for in-place operation")
+ self.type = error_type
+
+ def gil_assignment_check(self, env):
+ if env.nogil and self.type.is_pyobject:
+ error(self.pos, "Assignment of Python object not allowed without gil")
+
+ def check_const(self):
+ self.not_const()
+
+ def not_const(self):
+ error(self.pos, "Not allowed in a constant expression")
+
+ def check_const_addr(self):
+ self.addr_not_const()
+
+ def addr_not_const(self):
+ error(self.pos, "Address is not constant")
+
+ def gil_check(self, env):
+ if env.nogil and self.type.is_pyobject:
+ self.gil_error()
+
+ # ----------------- Result Allocation -----------------
+
+ def result_in_temp(self):
+ # Return true if result is in a temporary owned by
+ # this node or one of its subexpressions. Overridden
+ # by certain nodes which can share the result of
+ # a subnode.
+ return self.is_temp
+
+ def allocate_target_temps(self, env, rhs, inplace = 0):
+ # Perform temp allocation for the LHS of an assignment.
+ if debug_temp_alloc:
+ print self, "Allocating target temps"
+ self.allocate_subexpr_temps(env)
+ #self.result_code = self.target_code()
+ if rhs:
+ rhs.release_temp(env)
+ self.release_subexpr_temps(env)
+
+ def allocate_inplace_target_temps(self, env, rhs):
+ if debug_temp_alloc:
+ print self, "Allocating inplace target temps"
+ self.allocate_subexpr_temps(env)
+ #self.result_code = self.target_code()
+ py_inplace = self.type.is_pyobject
+ if py_inplace:
+ self.allocate_temp(env)
+ self.inplace_result = env.allocate_temp(py_object_type)
+ self.release_temp(env)
+ rhs.release_temp(env)
+ if py_inplace:
+ env.release_temp(self.inplace_result)
+ self.release_subexpr_temps(env)
+
+ def allocate_temps(self, env, result = None):
+ # Allocate temporary variables for this node and
+ # all its sub-expressions. If a result is specified,
+ # this must be a temp node and the specified variable
+ # is used as the result instead of allocating a new
+ # one.
+ if debug_temp_alloc:
+ print self, "Allocating temps"
+ self.allocate_subexpr_temps(env)
+ self.allocate_temp(env, result)
+ if self.is_temp:
+ self.release_subexpr_temps(env)
+
+ def allocate_subexpr_temps(self, env):
+ # Allocate temporary variables for all sub-expressions
+ # of this node.
+ if debug_temp_alloc:
+ print self, "Allocating temps for:", self.subexprs
+ for node in self.subexpr_nodes():
+ if node:
+ if debug_temp_alloc:
+ print self, "Allocating temps for", node
+ node.allocate_temps(env)
+
+ def allocate_temp(self, env, result = None):
+ # If this node requires a temporary variable for its
+ # result, allocate one. If a result is specified,
+ # this must be a temp node and the specified variable
+ # is used as the result instead of allocating a new
+ # one.
+ if debug_temp_alloc:
+ print self, "Allocating temp"
+ if result:
+ if not self.is_temp:
+ raise InternalError("Result forced on non-temp node")
+ self.result_code = result
+ elif self.is_temp:
+ type = self.type
+ if not type.is_void:
+ if type.is_pyobject:
+ type = PyrexTypes.py_object_type
+ self.result_code = env.allocate_temp(type)
+ else:
+ self.result_code = None
+ if debug_temp_alloc:
+ print self, "Allocated result", self.result_code
+ #else:
+ # self.result_code = self.calculate_result_code()
+
+ def target_code(self):
+ # Return code fragment for use as LHS of a C assignment.
+ return self.calculate_result_code()
+
+ def calculate_result_code(self):
+ self.not_implemented("calculate_result_code")
+
+ def release_temp(self, env):
+ # If this node owns a temporary result, release it,
+ # otherwise release results of its sub-expressions.
+ if self.is_temp:
+ if debug_temp_alloc:
+ print self, "Releasing result", self.result_code
+ env.release_temp(self.result_code)
+ else:
+ self.release_subexpr_temps(env)
+
+ def release_subexpr_temps(self, env):
+ # Release the results of all sub-expressions of
+ # this node.
+ for node in self.subexpr_nodes():
+ if node:
+ node.release_temp(env)
+
+ # ---------------- Code Generation -----------------
+
+ def mark_vars_used(self):
+ for node in self.subexpr_nodes():
+ node.mark_vars_used()
+
+ def make_owned_reference(self, code):
+ # If result is a pyobject, make sure we own
+ # a reference to it.
+ if self.type.is_pyobject and not self.result_in_temp():
+ code.put_incref(self.py_result())
+
+ def generate_evaluation_code(self, code):
+ # Generate code to evaluate this node and
+ # its sub-expressions, and dispose of any
+ # temporary results of its sub-expressions.
+ self.generate_subexpr_evaluation_code(code)
+ self.generate_result_code(code)
+ if self.is_temp:
+ self.generate_subexpr_disposal_code(code)
+
+ def generate_subexpr_evaluation_code(self, code):
+ for node in self.subexpr_nodes():
+ node.generate_evaluation_code(code)
+
+ def generate_result_code(self, code):
+ self.not_implemented("generate_result_code")
+
+ inplace_functions = {
+ "+=": "PyNumber_InPlaceAdd",
+ "-=": "PyNumber_InPlaceSubtract",
+ "*=": "PyNumber_InPlaceMultiply",
+ "/=": "PyNumber_InPlaceDivide",
+ "%=": "PyNumber_InPlaceRemainder",
+ "**=": "PyNumber_InPlacePower",
+ "<<=": "PyNumber_InPlaceLshift",
+ ">>=": "PyNumber_InPlaceRshift",
+ "&=": "PyNumber_InPlaceAnd",
+ "^=": "PyNumber_InPlaceXor",
+ "|=": "PyNumber_InPlaceOr",
+ }
+
+ def generate_inplace_operation_code(self, operator, rhs, code):
+ args = (self.py_result(), rhs.py_result())
+ if operator == "**=":
+ arg_code = "%s, %s, Py_None" % args
+ else:
+ arg_code = "%s, %s" % args
+ code.putln("%s = %s(%s); if (!%s) %s" % (
+ self.inplace_result,
+ self.inplace_functions[operator],
+ arg_code,
+ self.inplace_result,
+ code.error_goto(self.pos)))
+ if self.is_temp:
+ code.put_decref_clear(self.py_result())
+ rhs.generate_disposal_code(code)
+ if self.type.is_extension_type:
+ code.putln(
+ "if (!__Pyx_TypeTest(%s, %s)) %s" % (
+ self.inplace_result,
+ self.type.typeptr_cname,
+ code.error_goto(self.pos)))
+
+ def generate_disposal_code(self, code):
+ # If necessary, generate code to dispose of
+ # temporary Python reference.
+ if self.is_temp:
+ if self.type.is_pyobject:
+ code.put_decref_clear(self.py_result(), self.ctype())
+ else:
+ self.generate_subexpr_disposal_code(code)
+
+ def generate_subexpr_disposal_code(self, code):
+ # Generate code to dispose of temporary results
+ # of all sub-expressions.
+ for node in self.subexpr_nodes():
+ node.generate_disposal_code(code)
+
+ def generate_post_assignment_code(self, code):
+ # Same as generate_disposal_code except that
+ # assignment will have absorbed a reference to
+ # the result if it is a Python object.
+ if self.is_temp:
+ if self.type.is_pyobject:
+ code.putln("%s = 0;" % self.result())
+ else:
+ self.generate_subexpr_disposal_code(code)
+
+ def generate_inplace_result_disposal_code(self, code):
+ code.put_decref_clear(self.inplace_result, py_object_type)
+
+ def generate_assignment_code(self, rhs, code):
+ # Stub method for nodes which are not legal as
+ # the LHS of an assignment. An error will have
+ # been reported earlier.
+ pass
+
+ def generate_deletion_code(self, code):
+ # Stub method for nodes that are not legal as
+ # the argument of a del statement. An error
+ # will have been reported earlier.
+ pass
+
+ # ----------------- Coercion ----------------------
+
+ def coerce_to(self, dst_type, env):
+ # Coerce the result so that it can be assigned to
+ # something of type dst_type. If processing is necessary,
+ # wraps this node in a coercion node and returns that.
+ # Otherwise, returns this node unchanged.
+ #
+ # This method is called during the analyse_expressions
+ # phase of the src_node's processing.
+ src = self
+ src_type = self.type
+ src_is_py_type = src_type.is_pyobject
+ dst_is_py_type = dst_type.is_pyobject
+
+ if dst_type.is_pyobject:
+ if not src.type.is_pyobject:
+ src = CoerceToPyTypeNode(src, env)
+ if not src.type.subtype_of(dst_type):
+ if not isinstance(src, NoneNode):
+ src = PyTypeTestNode(src, dst_type, env)
+ elif src.type.is_pyobject:
+ src = CoerceFromPyTypeNode(dst_type, src, env)
+ else: # neither src nor dst are py types
+ if not dst_type.assignable_from(src_type):
+ error(self.pos, "Cannot assign type '%s' to '%s'" %
+ (src.type, dst_type))
+ return src
+
+ def coerce_to_pyobject(self, env):
+ return self.coerce_to(PyrexTypes.py_object_type, env)
+
+ def coerce_to_boolean(self, env):
+ # Coerce result to something acceptable as
+ # a boolean value.
+ type = self.type
+ if type.is_pyobject or type.is_ptr or type.is_float:
+ return CoerceToBooleanNode(self, env)
+ else:
+ if not type.is_int and not type.is_error:
+ error(self.pos,
+ "Type '%s' not acceptable as a boolean" % type)
+ return self
+
+ def coerce_to_integer(self, env):
+ # If not already some C integer type, coerce to longint.
+ if self.type.is_int:
+ return self
+ else:
+ return self.coerce_to(PyrexTypes.c_long_type, env)
+
+ def coerce_to_temp(self, env):
+ # Ensure that the result is in a temporary.
+ if self.result_in_temp():
+ return self
+ else:
+ return CoerceToTempNode(self, env)
+
+ def coerce_to_simple(self, env):
+ # Ensure that the result is simple (see is_simple).
+ if self.is_simple():
+ return self
+ else:
+ return self.coerce_to_temp(env)
+
+ def is_simple(self):
+ # A node is simple if its result is something that can
+ # be referred to without performing any operations, e.g.
+ # a constant, local var, C global var, struct member
+ # reference, or temporary.
+ return self.result_in_temp()
+
+
+class AtomicExprNode(ExprNode):
+ # Abstract base class for expression nodes which have
+ # no sub-expressions.
+
+ subexprs = []
+
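
To make the contract above concrete, a hypothetical leaf node could look like
the sketch below (not part of this patch): analyse_types fixes the result type
and requests a temporary, and generate_result_code emits the C that fills the
temporary in, using the same error-check idiom as LongNode and ImagNode
further down.

    # Hypothetical node: evaluates to a new Python float with value 0.0.
    class ZeroFloatObjectNode(AtomicExprNode):

        def analyse_types(self, env):
            self.type = py_object_type   # result is a Python object ...
            self.is_temp = 1             # ... held in an owned temporary

        def generate_result_code(self, code):
            code.putln("%s = PyFloat_FromDouble(0.0); if (!%s) %s" % (
                self.result(), self.result(), code.error_goto(self.pos)))
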
+
+class PyConstNode(AtomicExprNode):
+ # Abstract base class for constant Python values.
+
+ def is_simple(self):
+ return 1
+
+ def analyse_types(self, env):
+ self.type = py_object_type
+
+ def calculate_result_code(self):
+ return self.value
+
+ def generate_result_code(self, code):
+ pass
+
+
+class NoneNode(PyConstNode):
+ # The constant value None
+
+ value = "Py_None"
+
+ def compile_time_value(self, denv):
+ return None
+
+
+class EllipsisNode(PyConstNode):
+ # '...' in a subscript list.
+
+ value = "Py_Ellipsis"
+
+ def compile_time_value(self, denv):
+ return Ellipsis
+
+
+class ConstNode(AtomicExprNode):
+ # Abstract base type for literal constant nodes.
+ #
+ # value string C code fragment
+
+ is_literal = 1
+
+ def is_simple(self):
+ return 1
+
+ def analyse_types(self, env):
+ pass # Types are held in class variables
+
+ def check_const(self):
+ pass
+
+ def calculate_result_code(self):
+ return str(self.value)
+
+ def generate_result_code(self, code):
+ pass
+
+
+class NullNode(ConstNode):
+ type = PyrexTypes.c_null_ptr_type
+ value = "NULL"
+
+
+class CharNode(ConstNode):
+ type = PyrexTypes.c_char_type
+
+ def compile_time_value(self, denv):
+ return ord(self.value)
+
+ def calculate_result_code(self):
+ return "'%s'" % self.value
+
+
+class IntNode(ConstNode):
+ type = PyrexTypes.c_long_type
+
+ def compile_time_value(self, denv):
+ return int(self.value, 0)
+
+
+class FloatNode(ConstNode):
+ type = PyrexTypes.c_double_type
+
+ def compile_time_value(self, denv):
+ return float(self.value)
+
+ def calculate_result_code(self):
+ strval = str(self.value)
+ if strval == 'nan':
+ return "NAN"
+ elif strval == 'inf':
+ return "INFINITY"
+ elif strval == '-inf':
+ return "(-INFINITY)"
+ else:
+ return strval
+
+
+class StringNode(ConstNode):
+ # #entry Symtab.Entry
+
+ type = PyrexTypes.c_char_ptr_type
+
+ def compile_time_value(self, denv):
+ return eval('"%s"' % self.value)
+
+# def analyse_types(self, env):
+# self.entry = env.add_string_const(self.value)
+
+ def coerce_to(self, dst_type, env):
+ # Arrange for a Python version of the string to be pre-allocated
+ # when coercing to a Python type.
+ if dst_type.is_pyobject and not self.type.is_pyobject:
+ node = self.as_py_string_node(env)
+ else:
+ node = self
+ # We still need to perform normal coerce_to processing on the
+ # result, because we might be coercing to an extension type,
+ # in which case a type test node will be needed.
+ return ConstNode.coerce_to(node, dst_type, env)
+
+ def as_py_string_node(self, env):
+ # Return a new StringNode with the same value as this node
+ # but whose type is a Python type instead of a C type.
+ #entry = self.entry
+ #env.add_py_string(entry)
+ return StringNode(self.pos, type = py_object_type, value = self.value)
+
+ def generate_evaluation_code(self, code):
+ if self.type.is_pyobject:
+ self.result_code = code.get_py_string_const(self.value)
+ else:
+ self.result_code = code.get_string_const(self.value)
+
+ def calculate_result_code(self):
+ return self.result_code
+
+
+class LongNode(AtomicExprNode):
+ # Python long integer literal
+ #
+ # value string
+
+ def compile_time_value(self, denv):
+ return long(self.value)
+
+ gil_message = "Constructing Python long int"
+
+ def analyse_types(self, env):
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ def generate_evaluation_code(self, code):
+ result = self.result()
+ code.putln(
+ '%s = PyLong_FromString("%s", 0, 0); if (!%s) %s' % (
+ self.result(),
+ self.value,
+ self.result(),
+ code.error_goto(self.pos)))
+
+
+class ImagNode(AtomicExprNode):
+ # Imaginary number literal
+ #
+ # value float imaginary part
+
+ def compile_time_value(self, denv):
+ return complex(0.0, self.value)
+
+ gil_message = "Constructing complex number"
+
+ def analyse_types(self, env):
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ def generate_evaluation_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PyComplex_FromDoubles(0.0, %s); if (!%s) %s" % (
+ self.result(),
+ self.value,
+ self.result(),
+ code.error_goto(self.pos)))
+
+
+class NameNode(AtomicExprNode):
+ # Reference to a local or global variable name.
+ #
+ # name string Python name of the variable
+ #
+ # entry Entry Symbol table entry
+ # type_entry Entry For extension type names, the original type entry
+ # interned_cname string
+
+ is_name = 1
+ entry = None
+ type_entry = None
+
+ def compile_time_value(self, denv):
+ try:
+ return denv.lookup(self.name)
+ except KeyError:
+ error(self.pos, "Compile-time name '%s' not defined" % self.name)
+
+ def coerce_to(self, dst_type, env):
+ # If coercing to a generic pyobject and this is a builtin
+ # C function with a Python equivalent, manufacture a NameNode
+ # referring to the Python builtin.
+ #print "NameNode.coerce_to:", self.name, dst_type ###
+ if dst_type is py_object_type:
+ entry = self.entry
+ if entry.is_cfunction:
+ var_entry = entry.as_variable
+ if var_entry:
+ node = NameNode(self.pos, name = self.name)
+ node.entry = var_entry
+ node.analyse_rvalue_entry(env)
+ return node
+ return AtomicExprNode.coerce_to(self, dst_type, env)
+
+ def analyse_as_module(self, env):
+ # Try to interpret this as a reference to a cimported module.
+ # Returns the module scope, or None.
+ entry = env.lookup(self.name)
+ if entry and entry.as_module:
+ return entry.as_module
+ return None
+
+ def analyse_as_extension_type(self, env):
+ # Try to interpret this as a reference to an extension type.
+ # Returns the extension type, or None.
+ entry = env.lookup(self.name)
+ if entry and entry.is_type and entry.type.is_extension_type:
+ return entry.type
+ else:
+ return None
+
+ def analyse_target_declaration(self, env):
+ self.entry = env.lookup_here(self.name)
+ if not self.entry:
+ self.entry = env.declare_var(self.name, py_object_type, self.pos)
+
+ def analyse_types(self, env):
+ self.lookup_entry(env)
+ self.analyse_rvalue_entry(env)
+
+ def lookup_entry(self, env):
+ self.entry = env.lookup(self.name)
+ if not self.entry:
+ self.entry = env.declare_builtin(self.name, self.pos)
+
+ def analyse_target_types(self, env):
+ self.analyse_entry(env)
+ self.finish_analysing_lvalue()
+
+ def analyse_inplace_types(self, env):
+ self.analyse_rvalue_entry(env)
+ self.finish_analysing_lvalue()
+
+ def finish_analysing_lvalue(self):
+ if self.entry.is_readonly:
+ error(self.pos, "Assignment to read-only name '%s'"
+ % self.name)
+ elif not self.is_lvalue():
+ error(self.pos, "Assignment to non-lvalue '%s'"
+ % self.name)
+ self.type = PyrexTypes.error_type
+ self.entry.used = 1
+
+ def analyse_as_function(self, env):
+ self.lookup_entry(env)
+ if self.entry.is_type:
+ self.analyse_constructor_entry(env)
+ else:
+ self.analyse_rvalue_entry(env)
+
+ def analyse_constructor_entry(self, env):
+ entry = self.entry
+ type = entry.type
+ if type.is_struct_or_union:
+ self.type = entry.type.cplus_constructor_type
+ elif type.is_pyobject:
+ self.analyse_rvalue_entry(env)
+ else:
+ error(self.pos, "Type '%s' not callable as a C++ constructor" % type)
+ self.type = error_type
+
+ def analyse_rvalue_entry(self, env):
+ #print "NameNode.analyse_rvalue_entry:", self.name ###
+ #print "Entry:", self.entry.__dict__ ###
+ self.analyse_entry(env)
+ entry = self.entry
+ if entry.is_declared_generic:
+ self.result_ctype = py_object_type
+ if entry.is_pyglobal or entry.is_builtin:
+ self.is_temp = 1
+ self.gil_check(env)
+
+ gil_message = "Accessing Python global or builtin"
+
+ def analyse_entry(self, env):
+ #print "NameNode.analyse_entry:", self.name ###
+ self.check_identifier_kind()
+ entry = self.entry
+ type = entry.type
+ ctype = entry.ctype
+ self.type = type
+ if ctype:
+ self.result_ctype = ctype
+ if entry.is_pyglobal or entry.is_builtin:
+ assert type.is_pyobject, "Python global or builtin not a Python object"
+ #self.interned_cname = env.intern(self.entry.name)
+
+ def check_identifier_kind(self):
+ # Check that this is an appropriate kind of name for use in an expression.
+ # Also finds the variable entry associated with an extension type.
+ entry = self.entry
+ if entry.is_type and entry.type.is_extension_type:
+ self.type_entry = entry
+ if not (entry.is_const or entry.is_variable
+ or entry.is_builtin or entry.is_cfunction):
+ if self.entry.as_variable:
+ self.entry = self.entry.as_variable
+ else:
+ error(self.pos,
+ "'%s' is not a constant, variable or function identifier" % self.name)
+
+ def is_simple(self):
+ # If it's not a C variable, it'll be in a temp.
+ return 1
+
+ def calculate_target_results(self, env):
+ pass
+
+ def check_const(self):
+ entry = self.entry
+ if not (entry.is_const or entry.is_cfunction):
+ self.not_const()
+
+ def check_const_addr(self):
+ entry = self.entry
+ if not (entry.is_cglobal or entry.is_cfunction):
+ self.addr_not_const()
+
+ def is_lvalue(self):
+ entry = self.entry
+ return entry.is_variable and \
+ not entry.type.is_array and \
+ not entry.is_readonly
+
+ def is_inplace_lvalue(self):
+ return self.is_lvalue()
+
+ def is_ephemeral(self):
+ # Name nodes are never ephemeral, even if the
+ # result is in a temporary.
+ return 0
+
+ def allocate_temp(self, env, result = None):
+ AtomicExprNode.allocate_temp(self, env, result)
+ entry = self.entry
+ if entry:
+ entry.used = 1
+
+ def calculate_result_code(self):
+ entry = self.entry
+ if not entry:
+ return "<error>" # There was an error earlier
+ return entry.cname
+
+ def generate_result_code(self, code):
+ assert hasattr(self, 'entry')
+ entry = self.entry
+ if entry is None:
+ return # There was an error earlier
+ if entry.utility_code:
+ code.use_utility_code(entry.utility_code)
+ if entry.is_pyglobal or entry.is_builtin:
+ if entry.is_builtin:
+ namespace = Naming.builtins_cname
+ else: # entry.is_pyglobal
+ namespace = entry.namespace_cname
+ result = self.result()
+ cname = code.intern(self.entry.name)
+ code.use_utility_code(get_name_interned_utility_code)
+ code.putln(
+ '%s = __Pyx_GetName(%s, %s); if (!%s) %s' % (
+ result,
+ namespace,
+ cname,
+ result,
+ code.error_goto(self.pos)))
+
+ def generate_setattr_code(self, value_code, code):
+ entry = self.entry
+ namespace = self.entry.namespace_cname
+ cname = code.intern(self.entry.name)
+ code.putln(
+ 'if (PyObject_SetAttr(%s, %s, %s) < 0) %s' % (
+ namespace,
+ cname,
+ value_code,
+ code.error_goto(self.pos)))
+
+ def generate_assignment_code(self, rhs, code):
+ #print "NameNode.generate_assignment_code:", self.name ###
+ entry = self.entry
+ if entry is None:
+ return # There was an error earlier
+ if entry.is_pyglobal:
+ self.generate_setattr_code(rhs.py_result(), code)
+ if debug_disposal_code:
+ print "NameNode.generate_assignment_code:"
+ print "...generating disposal code for", rhs
+ rhs.generate_disposal_code(code)
+ else:
+ if self.type.is_pyobject:
+ rhs.make_owned_reference(code)
+ code.put_decref(self.py_result())
+ code.putln('%s = %s;' % (self.result(), rhs.result_as(self.ctype())))
+ if debug_disposal_code:
+ print "NameNode.generate_assignment_code:"
+ print "...generating post-assignment code for", rhs
+ rhs.generate_post_assignment_code(code)
+
+ def generate_inplace_assignment_code(self, operator, rhs, code):
+ entry = self.entry
+ if entry is None:
+ return # There was an error earlier
+ if self.type.is_pyobject:
+ self.generate_result_code(code)
+ self.generate_inplace_operation_code(operator, rhs, code)
+ if entry.is_pyglobal:
+ self.generate_setattr_code(self.inplace_result, code)
+ self.generate_inplace_result_disposal_code(code)
+ else:
+ code.put_decref(self.py_result())
+ cast_inplace_result = typecast(self.ctype(), py_object_type, self.inplace_result)
+ code.putln('%s = %s;' % (self.result(), cast_inplace_result))
+ else:
+ code.putln("%s %s %s;" % (self.result(), operator, rhs.result()))
+ rhs.generate_disposal_code(code)
+
+ def generate_deletion_code(self, code):
+ if self.entry is None:
+ return # There was an error earlier
+ if not self.entry.is_pyglobal:
+ error(self.pos, "Deletion of local or C global name not supported")
+ return
+ cname = code.intern(self.entry.name)
+ code.putln(
+ 'if (PyObject_DelAttr(%s, %s) < 0) %s' % (
+ Naming.module_cname,
+ cname,
+ code.error_goto(self.pos)))
+
+ def mark_vars_used(self):
+ if self.entry:
+ self.entry.used = 1
+
+
+class BackquoteNode(ExprNode):
+ # `expr`
+ #
+ # arg ExprNode
+
+ subexprs = ['arg']
+
+ def analyse_types(self, env):
+ self.arg.analyse_types(env)
+ self.arg = self.arg.coerce_to_pyobject(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Backquote expression"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PyObject_Repr(%s); if (!%s) %s" % (
+ result,
+ self.arg.py_result(),
+ result,
+ code.error_goto(self.pos)))
+
+
+class ImportNode(ExprNode):
+ # Used as part of import statement implementation.
+ # Implements result =
+ # __import__(module_name, globals(), None, name_list)
+ #
+ # module_name StringNode dotted name of module
+ # name_list ListNode or None list of names to be imported
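+ #
+ # Generates roughly:  <result> = __Pyx_Import(<module_name>, <name_list or 0>)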
+
+ subexprs = ['module_name', 'name_list']
+
+ def analyse_types(self, env):
+ self.module_name.analyse_types(env)
+ self.module_name = self.module_name.coerce_to_pyobject(env)
+ if self.name_list:
+ self.name_list.analyse_types(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+# env.use_utility_code(import_utility_code)
+
+ gil_message = "Python import"
+
+ def generate_result_code(self, code):
+ if self.name_list:
+ name_list_code = self.name_list.py_result()
+ else:
+ name_list_code = "0"
+ code.use_utility_code(import_utility_code)
+ result = self.result()
+ code.putln(
+ "%s = __Pyx_Import(%s, %s); if (!%s) %s" % (
+ result,
+ self.module_name.py_result(),
+ name_list_code,
+ result,
+ code.error_goto(self.pos)))
+
+
+class IteratorNode(ExprNode):
+ # Used as part of for statement implementation.
+ # Implements result = iter(sequence)
+ #
+ # sequence ExprNode
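+ #
+ # Generates roughly:  <result> = PyObject_GetIter(<sequence>)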
+
+ subexprs = ['sequence']
+
+ def analyse_types(self, env):
+ self.sequence.analyse_types(env)
+ self.sequence = self.sequence.coerce_to_pyobject(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Iterating over Python object"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PyObject_GetIter(%s); if (!%s) %s" % (
+ result,
+ self.sequence.py_result(),
+ result,
+ code.error_goto(self.pos)))
+
+
+class NextNode(AtomicExprNode):
+ # Used as part of for statement implementation.
+ # Implements result = iterator.next()
+ # Created during analyse_types phase.
+ # The iterator is not owned by this node.
+ #
+ # iterator ExprNode
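+ #
+ # Generates roughly:
+ #     <result> = PyIter_Next(<iterator>);
+ #     if (!<result>) { if (PyErr_Occurred()) <goto error>; break; }
+ # so an exhausted iterator breaks out of the enclosing for loop.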
+
+ def __init__(self, iterator, env):
+ self.pos = iterator.pos
+ self.iterator = iterator
+ self.type = py_object_type
+ self.is_temp = 1
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PyIter_Next(%s);" % (
+ result,
+ self.iterator.py_result()))
+ code.putln(
+ "if (!%s) {" %
+ result)
+ code.putln(
+ "if (PyErr_Occurred()) %s" %
+ code.error_goto(self.pos))
+ code.putln(
+ "break;")
+ code.putln(
+ "}")
+
+
+class ExcValueNode(AtomicExprNode):
+ # Node created during analyse_types phase
+ # of an ExceptClauseNode to fetch the current
+ # exception or traceback value.
+
+ def __init__(self, pos, env, var):
+ ExprNode.__init__(self, pos)
+ self.type = py_object_type
+ self.var = var
+
+ def calculate_result_code(self):
+ return self.var
+
+ def generate_result_code(self, code):
+ pass
+
+
+class TempNode(AtomicExprNode):
+ # Node created during analyse_types phase
+ # of some nodes to hold a temporary value.
+
+ def __init__(self, pos, type, env):
+ ExprNode.__init__(self, pos)
+ self.type = type
+ if type.is_pyobject:
+ self.result_ctype = py_object_type
+ self.is_temp = 1
+
+ def generate_result_code(self, code):
+ pass
+
+
+class PyTempNode(TempNode):
+ # TempNode holding a Python value.
+
+ def __init__(self, pos, env):
+ TempNode.__init__(self, pos, PyrexTypes.py_object_type, env)
+
+
+#-------------------------------------------------------------------
+#
+# Trailer nodes
+#
+#-------------------------------------------------------------------
+
+class IndexNode(ExprNode):
+ # Sequence indexing.
+ #
+ # base ExprNode
+ # index ExprNode
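+ #
+ # Indexing a Python object with a signed C integer index uses the
+ # __Pyx_GetItemInt/__Pyx_SetItemInt helpers; any other index is
+ # coerced to a Python object and goes through PyObject_GetItem/SetItem.
+ # Indexing a C pointer or array compiles to plain (base[index]).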
+
+ subexprs = ['base', 'index']
+
+ def compile_time_value(self, denv):
+ base = self.base.compile_time_value(denv)
+ index = self.index.compile_time_value(denv)
+ try:
+ return base[index]
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def is_ephemeral(self):
+ return self.base.is_ephemeral()
+
+ def analyse_target_declaration(self, env):
+ pass
+
+ def analyse_types(self, env):
+ self.analyse_base_and_index_types(env, getting = 1)
+
+ def analyse_target_types(self, env):
+ self.analyse_base_and_index_types(env, setting = 1)
+
+ def analyse_inplace_types(self, env):
+ self.analyse_base_and_index_types(env, getting = 1, setting = 1)
+
+ def analyse_base_and_index_types(self, env, getting = 0, setting = 0):
+ self.base.analyse_types(env)
+ self.index.analyse_types(env)
+ btype = self.base.type
+ if btype.is_pyobject:
+ itype = self.index.type
+ if not (btype.is_sequence and itype.is_int and itype.signed):
+ self.index = self.index.coerce_to_pyobject(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+ else:
+ if self.base.type.is_ptr or self.base.type.is_array:
+ self.type = self.base.type.base_type
+ else:
+ error(self.pos,
+ "Attempting to index non-array type '%s'" %
+ self.base.type)
+ self.type = PyrexTypes.error_type
+ if self.index.type.is_pyobject:
+ self.index = self.index.coerce_to(
+ PyrexTypes.c_py_ssize_t_type, env)
+ if not self.index.type.is_int:
+ error(self.pos,
+ "Invalid index type '%s'" %
+ self.index.type)
+
+ gil_message = "Indexing Python object"
+
+ def check_const_addr(self):
+ self.base.check_const_addr()
+ self.index.check_const()
+
+ def is_lvalue(self):
+ return 1
+
+ def is_inplace_lvalue(self):
+ return 1
+
+ def calculate_result_code(self):
+ return "(%s[%s])" % (
+ self.base.result(), self.index.result())
+
+ def generate_result_code(self, code):
+ if self.type.is_pyobject:
+ itype = self.index.type
+ if itype.is_int and itype.signed:
+ code.use_utility_code(getitem_int_utility_code)
+ function = "__Pyx_GetItemInt"
+ index_code = self.index.result()
+ else:
+ function = "PyObject_GetItem"
+ index_code = self.index.py_result()
+ result = self.result()
+ code.putln(
+ "%s = %s(%s, %s); if (!%s) %s" % (
+ result,
+ function,
+ self.base.py_result(),
+ index_code,
+ result,
+ code.error_goto(self.pos)))
+
+ def generate_setitem_code(self, value_code, code):
+ itype = self.index.type
+ if itype.is_int and itype.signed:
+ code.use_utility_code(setitem_int_utility_code)
+ function = "__Pyx_SetItemInt"
+ index_code = self.index.result()
+ else:
+ function = "PyObject_SetItem"
+ index_code = self.index.py_result()
+ code.putln(
+ "if (%s(%s, %s, %s) < 0) %s" % (
+ function,
+ self.base.py_result(),
+ index_code,
+ value_code,
+ code.error_goto(self.pos)))
+
+ def generate_assignment_code(self, rhs, code):
+ self.generate_subexpr_evaluation_code(code)
+ if self.type.is_pyobject:
+ self.generate_setitem_code(rhs.py_result(), code)
+ else:
+ code.putln(
+ "%s = %s;" % (
+ self.result(), rhs.result()))
+ self.generate_subexpr_disposal_code(code)
+ rhs.generate_disposal_code(code)
+
+ def generate_inplace_assignment_code(self, operator, rhs, code):
+ self.generate_subexpr_evaluation_code(code)
+ if self.type.is_pyobject:
+ self.generate_result_code(code)
+ self.generate_inplace_operation_code(operator, rhs, code)
+ self.generate_setitem_code(self.inplace_result, code)
+ self.generate_inplace_result_disposal_code(code)
+ else:
+ code.putln("%s %s %s;" % (self.result(), operator, rhs.result()))
+ rhs.generate_disposal_code(code)
+ self.generate_subexpr_disposal_code(code)
+
+ def generate_deletion_code(self, code):
+ self.generate_subexpr_evaluation_code(code)
+ if self.base.type.is_sequence and self.index.type.is_int:
+ function = "PySequence_DelItem"
+ index_code = self.index.result()
+ else:
+ function = "PyObject_DelItem"
+ index_code = self.index.py_result()
+ code.putln(
+ "if (%s(%s, %s) < 0) %s" % (
+ function,
+ self.base.py_result(),
+ index_code,
+ code.error_goto(self.pos)))
+ #else:
+ # error(self.pos, "Cannot delete non-Python variable")
+ self.generate_subexpr_disposal_code(code)
+
+
+class SliceIndexNode(ExprNode):
+ # 2-element slice indexing
+ #
+ # base ExprNode
+ # start ExprNode or None
+ # stop ExprNode or None
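+ #
+ # Compiles to PySequence_GetSlice/SetSlice/DelSlice, with a missing
+ # start defaulting to 0 and a missing stop to PY_SSIZE_T_MAX.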
+
+ subexprs = ['base', 'start', 'stop']
+
+ def is_inplace_lvalue(self):
+ return 1
+
+ def compile_time_value(self, denv):
+ base = self.base.compile_time_value(denv)
+ start = self.start.compile_time_value(denv)
+ stop = self.stop.compile_time_value(denv)
+ try:
+ return base[start:stop]
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_target_declaration(self, env):
+ pass
+
+ def analyse_types(self, env):
+ self.base.analyse_types(env)
+ if self.start:
+ self.start.analyse_types(env)
+ if self.stop:
+ self.stop.analyse_types(env)
+ self.base = self.base.coerce_to_pyobject(env)
+ c_int = PyrexTypes.c_py_ssize_t_type
+ if self.start:
+ self.start = self.start.coerce_to(c_int, env)
+ if self.stop:
+ self.stop = self.stop.coerce_to(c_int, env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Slicing Python object"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PySequence_GetSlice(%s, %s, %s); if (!%s) %s" % (
+ result,
+ self.base.py_result(),
+ self.start_code(),
+ self.stop_code(),
+ result,
+ code.error_goto(self.pos)))
+
+ def generate_setslice_code(self, value_code, code):
+ code.putln(
+ "if (PySequence_SetSlice(%s, %s, %s, %s) < 0) %s" % (
+ self.base.py_result(),
+ self.start_code(),
+ self.stop_code(),
+ value_code,
+ code.error_goto(self.pos)))
+
+ def generate_assignment_code(self, rhs, code):
+ self.generate_subexpr_evaluation_code(code)
+ self.generate_setslice_code(rhs.result(), code)
+ self.generate_subexpr_disposal_code(code)
+ rhs.generate_disposal_code(code)
+
+ def generate_inplace_assignment_code(self, operator, rhs, code):
+ self.generate_subexpr_evaluation_code(code)
+ self.generate_result_code(code)
+ self.generate_inplace_operation_code(operator, rhs, code)
+ self.generate_setslice_code(self.inplace_result, code)
+ self.generate_inplace_result_disposal_code(code)
+ self.generate_subexpr_disposal_code(code)
+
+ def generate_deletion_code(self, code):
+ self.generate_subexpr_evaluation_code(code)
+ code.putln(
+ "if (PySequence_DelSlice(%s, %s, %s) < 0) %s" % (
+ self.base.py_result(),
+ self.start_code(),
+ self.stop_code(),
+ code.error_goto(self.pos)))
+ self.generate_subexpr_disposal_code(code)
+
+ def start_code(self):
+ if self.start:
+ return self.start.result()
+ else:
+ return "0"
+
+ def stop_code(self):
+ if self.stop:
+ return self.stop.result()
+ else:
+ return "PY_SSIZE_T_MAX"
+
+# def calculate_result_code(self):
+# # self.result_code is not used, but this method must exist
+# return "<unused>"
+
+
+class SliceNode(ExprNode):
+ # start:stop:step in subscript list
+ #
+ # start ExprNode
+ # stop ExprNode
+ # step ExprNode
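+ #
+ # Generates roughly:  <result> = PySlice_New(<start>, <stop>, <step>)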
+
+ def compile_time_value(self, denv):
+ start = self.start.compile_time_value(denv)
+ stop = self.stop.compile_time_value(denv)
+ step = self.step.compile_time_value(denv)
+ try:
+ return slice(start, stop, step)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ subexprs = ['start', 'stop', 'step']
+
+ def analyse_types(self, env):
+ self.start.analyse_types(env)
+ self.stop.analyse_types(env)
+ self.step.analyse_types(env)
+ self.start = self.start.coerce_to_pyobject(env)
+ self.stop = self.stop.coerce_to_pyobject(env)
+ self.step = self.step.coerce_to_pyobject(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Constructing Python slice object"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PySlice_New(%s, %s, %s); if (!%s) %s" % (
+ result,
+ self.start.py_result(),
+ self.stop.py_result(),
+ self.step.py_result(),
+ result,
+ code.error_goto(self.pos)))
+
+
+class CallNode(ExprNode):
+
+ def gil_check(self, env):
+ # Make sure we're not in a nogil environment
+ if env.nogil:
+ error(self.pos, "Calling gil-requiring function without gil")
+
+
+class SimpleCallNode(CallNode):
+ # Function call without keyword, * or ** args.
+ #
+ # function ExprNode
+ # args [ExprNode]
+ # arg_tuple ExprNode or None used internally
+ # self ExprNode or None used internally
+ # coerced_self ExprNode or None used internally
+ # function_type PyrexType used internally
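+ #
+ # A call on a Python object packs the arguments into a tuple and goes
+ # through PyObject_CallObject; a C call is emitted directly, with a
+ # coerced 'self' re-inserted as the first argument for bound C methods
+ # and error checks derived from the function's exception_value and
+ # exception_check declarations.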
+
+ subexprs = ['self', 'coerced_self', 'function', 'args', 'arg_tuple']
+
+ self = None
+ coerced_self = None
+ arg_tuple = None
+ is_new = False
+
+ cplus_argless_constr_type = CFuncType(None, [])
+
+ def compile_time_value(self, denv):
+ function = self.function.compile_time_value(denv)
+ args = [arg.compile_time_value(denv) for arg in self.args]
+ try:
+ return function(*args)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_types(self, env):
+ #print "SimpleCallNode.analyse_types:", self.pos ###
+ function = self.function
+ function.is_called = 1
+ function.analyse_as_function(env)
+ if function.is_name or function.is_attribute:
+ #print "SimpleCallNode.analyse_types:", self.pos, "is name or attribute" ###
+ func_entry = function.entry
+ if func_entry:
+ if func_entry.is_cmethod or func_entry.is_builtin_method:
+ # Take ownership of the object from which the attribute
+ # was obtained, because we need to pass it as 'self'.
+ #print "SimpleCallNode: Snarfing self argument" ###
+ self.self = function.obj
+ function.obj = CloneNode(self.self)
+ elif self.is_new:
+ if not (func_entry.is_type and func_entry.type.is_struct_or_union
+ and func_entry.type.scope.is_cplus):
+ error(self.pos, "'new' operator can only be used on a C++ struct type")
+ self.type = error_type
+ return
+ else:
+ #print "SimpleCallNode.analyse_types:", self.pos, "not name or attribute" ###
+ if self.is_new:
+ error(self.pos, "Invalid use of 'new' operator")
+ self.type = error_type
+ return
+ func_type = self.function.type
+ if func_type.is_ptr:
+ func_type = func_type.base_type
+ self.function_type = func_type
+ if func_type.is_pyobject:
+ #print "SimpleCallNode: Python call" ###
+ if self.args:
+ self.arg_tuple = TupleNode(self.pos, args = self.args)
+ self.arg_tuple.analyse_types(env)
+ else:
+ self.arg_tuple = None
+ self.args = None
+ if function.is_name and function.type_entry:
+ # We are calling an extension type constructor
+ self.type = function.type_entry.type
+ self.result_ctype = py_object_type
+ else:
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+ else:
+ #print "SimpleCallNode: C call" ###
+ for arg in self.args:
+ arg.analyse_types(env)
+ if func_type.is_cfunction:
+ self.type = func_type.return_type
+ if self.is_new:
+ self.type = CPtrType(self.type)
+ if func_type.is_overloaded:
+ func_type = self.resolve_overloading()
+ if not func_type:
+ self.type = error_type
+ return
+ if self.self and func_type.args:
+ #print "SimpleCallNode: Inserting self into argument list" ###
+ # Coerce 'self' to the type expected by the method.
+ expected_type = func_type.args[0].type
+ self.coerced_self = CloneNode(self.self).coerce_to(
+ expected_type, env)
+ # Insert coerced 'self' argument into argument list.
+ self.args.insert(0, self.coerced_self)
+ self.analyse_c_function_call(env)
+
+ def resolve_overloading(self):
+ func_type = self.function_type
+ arg_types = [arg.type for arg in self.args]
+ signatures = func_type.signatures or [self.cplus_argless_constr_type]
+ for signature in signatures:
+ if signature.callable_with(arg_types):
+ signature.return_type = func_type.return_type
+ self.function_type = signature
+ return signature
+ def display_types(types):
+ return ", ".join([str(type) for type in types])
+ error(self.pos, "No matching signature found for argument types (%s)"
+ % display_types(arg_types))
+ if signatures:
+ error(self.pos, "Candidates are:")
+ for signature in signatures:
+ error(signature.pos, "(%s)" % display_types(signature.args))
+
+ def analyse_c_function_call(self, env):
+ func_type = self.function_type
+ # Check function type
+ if not func_type.is_cfunction:
+ if not func_type.is_error:
+ error(self.pos, "Calling non-function type '%s'" %
+ func_type)
+ self.type = PyrexTypes.error_type
+ return
+ # Check no. of args
+ expected_nargs = len(func_type.args)
+ actual_nargs = len(self.args)
+ if actual_nargs < expected_nargs \
+ or (not func_type.has_varargs and actual_nargs > expected_nargs):
+ expected_str = str(expected_nargs)
+ if func_type.has_varargs:
+ expected_str = "at least " + expected_str
+ error(self.pos,
+ "Call with wrong number of arguments (expected %s, got %s)"
+ % (expected_str, actual_nargs))
+ self.args = None
+ self.type = PyrexTypes.error_type
+ return
+ # Coerce arguments
+ for i in range(expected_nargs):
+ formal_type = func_type.args[i].type
+ self.args[i] = self.args[i].coerce_to(formal_type, env)
+ for i in range(expected_nargs, actual_nargs):
+ if self.args[i].type.is_pyobject:
+ error(self.args[i].pos,
+ "Python object cannot be passed as a varargs parameter")
+ # Calc result code fragment
+ #print "SimpleCallNode.analyse_c_function_call: self.type =", self.type ###
+ if self.type.is_pyobject \
+ or func_type.exception_value is not None \
+ or func_type.exception_check:
+ self.is_temp = 1
+ if self.type.is_pyobject:
+ self.result_ctype = py_object_type
+ # Check gil
+ if not func_type.nogil:
+ self.gil_check(env)
+ if func_type.exception_check and env.nogil:
+ self.gil_error("Calling 'except ?' or 'except *' function")
+
+ def calculate_result_code(self):
+ return self.c_call_code()
+
+ def c_call_code(self):
+ if self.type.is_error or self.args is None or not self.function_type.is_cfunction:
+ return "<error>"
+ func_type = self.function_type
+ formal_args = func_type.args
+ arg_list_code = []
+ for (formal_arg, actual_arg) in zip(formal_args, self.args):
+ arg_code = actual_arg.result_as(formal_arg.type)
+ arg_list_code.append(arg_code)
+ for actual_arg in self.args[len(formal_args):]:
+ arg_list_code.append(actual_arg.result())
+ result = "%s(%s)" % (self.function.result(),
+ join(arg_list_code, ","))
+ if self.is_new:
+ result = "new " + result
+ return result
+
+ def generate_result_code(self, code):
+ if self.type.is_error:
+ return
+ func_type = self.function_type
+ result = self.result()
+ if func_type.is_pyobject:
+ if self.arg_tuple:
+ arg_code = self.arg_tuple.py_result()
+ else:
+ arg_code = "0"
+ code.putln(
+ "%s = PyObject_CallObject(%s, %s); if (!%s) %s" % (
+ result,
+ self.function.py_result(),
+ arg_code,
+ result,
+ code.error_goto(self.pos)))
+ elif func_type.is_cfunction:
+ exc_checks = []
+ if self.type.is_pyobject:
+ exc_checks.append("!%s" % result)
+ else:
+ exc_val = func_type.exception_value
+ exc_check = func_type.exception_check
+ if exc_val is not None:
+ exc_checks.append("%s == %s" % (self.result(), exc_val))
+ if exc_check:
+ exc_checks.append("PyErr_Occurred()")
+ if self.is_temp or exc_checks:
+ rhs = self.c_call_code()
+ result = self.result()
+ if result:
+ lhs = "%s = " % result
+ if self.is_temp and self.type.is_pyobject:
+ #return_type = self.type # func_type.return_type
+ #print "SimpleCallNode.generate_result_code: casting", rhs, \
+ # "from", return_type, "to pyobject" ###
+ rhs = typecast(py_object_type, self.type, rhs)
+ else:
+ lhs = ""
+ code.putln(
+ "%s%s; if (%s) %s" % (
+ lhs,
+ rhs,
+ " && ".join(exc_checks),
+ code.error_goto(self.pos)))
+
+
+class GeneralCallNode(CallNode):
+ # General Python function call, including keyword,
+ # * and ** arguments.
+ #
+ # function ExprNode
+ # positional_args ExprNode Tuple of positional arguments
+ # keyword_args ExprNode or None Dict of keyword arguments
+ # starstar_arg ExprNode or None Dict of extra keyword args
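+ #
+ # If both keyword_args and starstar_arg are present they are merged
+ # with PyDict_Update; the call itself is PyObject_CallObject or
+ # PyEval_CallObjectWithKeywords, depending on whether any keyword
+ # dict remains.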
+
+ subexprs = ['function', 'positional_args', 'keyword_args', 'starstar_arg']
+
+ def compile_time_value(self, denv):
+ function = self.function.compile_time_value(denv)
+ positional_args = self.positional_args.compile_time_value(denv)
+ keyword_args = self.keyword_args.compile_time_value(denv)
+ starstar_arg = self.starstar_arg.compile_time_value(denv)
+ try:
+ keyword_args.update(starstar_arg)
+ return function(*positional_args, **keyword_args)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_types(self, env):
+ function = self.function
+ function.analyse_types(env)
+ self.positional_args.analyse_types(env)
+ if self.keyword_args:
+ self.keyword_args.analyse_types(env)
+ if self.starstar_arg:
+ self.starstar_arg.analyse_types(env)
+ self.function = self.function.coerce_to_pyobject(env)
+ self.positional_args = \
+ self.positional_args.coerce_to_pyobject(env)
+ if self.starstar_arg:
+ self.starstar_arg = \
+ self.starstar_arg.coerce_to_pyobject(env)
+ if function.is_name and function.type_entry:
+ # We are calling an extension type constructor
+ self.type = function.type_entry.type
+ self.result_ctype = py_object_type
+ else:
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ def generate_result_code(self, code):
+ if self.keyword_args and self.starstar_arg:
+ code.putln(
+ "if (PyDict_Update(%s, %s) < 0) %s" % (
+ self.keyword_args.py_result(),
+ self.starstar_arg.py_result(),
+ code.error_goto(self.pos)))
+ keyword_code = self.keyword_args.py_result()
+ elif self.keyword_args:
+ keyword_code = self.keyword_args.py_result()
+ elif self.starstar_arg:
+ keyword_code = self.starstar_arg.py_result()
+ else:
+ keyword_code = None
+ if not keyword_code:
+ call_code = "PyObject_CallObject(%s, %s)" % (
+ self.function.py_result(),
+ self.positional_args.py_result())
+ else:
+ call_code = "PyEval_CallObjectWithKeywords(%s, %s, %s)" % (
+ self.function.py_result(),
+ self.positional_args.py_result(),
+ keyword_code)
+ result = self.result()
+ code.putln(
+ "%s = %s; if (!%s) %s" % (
+ result,
+ call_code,
+ result,
+ code.error_goto(self.pos)))
+
+
+class AsTupleNode(ExprNode):
+ # Convert argument to tuple. Used for normalising
+ # the * argument of a function call.
+ #
+ # arg ExprNode
+
+ subexprs = ['arg']
+
+ def compile_time_value(self, denv):
+ arg = self.arg.compile_time_value(denv)
+ try:
+ return tuple(arg)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_types(self, env):
+ self.arg.analyse_types(env)
+ self.arg = self.arg.coerce_to_pyobject(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Constructing Python tuple"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PySequence_Tuple(%s); if (!%s) %s" % (
+ result,
+ self.arg.py_result(),
+ result,
+ code.error_goto(self.pos)))
+
+
+class AttributeNode(ExprNode):
+ # obj.attribute
+ #
+ # obj ExprNode
+ # attribute string
+ #
+ # Used internally:
+ #
+ # is_py_attr boolean Is a Python getattr operation
+ # member string C name of struct member
+ # is_called boolean Function call is being done on result
+ # entry Entry Symbol table entry of attribute
+ # interned_attr_cname string C name of interned attribute name
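+ #
+ # An attribute of a Python object becomes PyObject_GetAttr/SetAttr/
+ # DelAttr on an interned name; an attribute of a struct, union or
+ # extension type compiles to obj.member or obj->member, and C methods
+ # are looked up through the vtable struct.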
+
+ is_attribute = 1
+ subexprs = ['obj']
+
+ type = PyrexTypes.error_type
+ result_code = "<error>"
+ entry = None
+ is_called = 0
+
+ def compile_time_value(self, denv):
+ attr = self.attribute
+ if attr.startswith("__") and attr.endswith("__"):
+ error(self.pos,
+ "Invalid attribute name '%s' in compile-time expression" % attr)
+ return None
+ obj = self.obj.compile_time_value(denv)
+ try:
+ return getattr(obj, attr)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_target_declaration(self, env):
+ pass
+
+ def analyse_target_types(self, env):
+ self.analyse_types(env, target = 1)
+
+ def analyse_as_function(self, env):
+ module_scope = self.obj.analyse_as_module(env)
+ if module_scope:
+ entry = module_scope.lookup_here(self.attribute)
+ if entry and entry.is_type:
+ self.mutate_into_name_node(entry)
+ self.analyse_constructor_entry(env)
+ return
+ self.analyse_types(env)
+
+ def analyse_types(self, env, target = 0):
+ if self.analyse_as_cimported_attribute(env, target):
+ return
+ if not target and self.analyse_as_unbound_cmethod(env):
+ return
+ self.analyse_as_ordinary_attribute(env, target)
+
+ def analyse_as_cimported_attribute(self, env, target = 0, allow_type = 0):
+ # Try to interpret this as a reference to an imported
+ # C const, type, var or function. If successful, mutates
+ # this node into a NameNode and returns 1, otherwise
+ # returns 0.
+ module_scope = self.obj.analyse_as_module(env)
+ if module_scope:
+ entry = module_scope.lookup_here(self.attribute)
+ if entry and (
+ entry.is_cglobal or entry.is_cfunction
+ or entry.is_type or entry.is_const):
+ self.mutate_into_name_node(entry)
+ if entry.is_type and allow_type:
+ pass
+ elif target:
+ self.analyse_target_types(env)
+ else:
+ self.analyse_rvalue_entry(env)
+ return 1
+ return 0
+
+ def analyse_as_unbound_cmethod(self, env):
+ # Try to interpret this as a reference to an unbound
+ # C method of an extension type. If successful, mutates
+ # this node into a NameNode and returns 1, otherwise
+ # returns 0.
+ type = self.obj.analyse_as_extension_type(env)
+ if type:
+ entry = type.scope.lookup_here(self.attribute)
+ if entry and entry.is_cmethod:
+ # Create a temporary entry describing the C method
+ # as an ordinary function.
+ ubcm_entry = Symtab.Entry(entry.name,
+ "%s->%s" % (type.vtabptr_cname, entry.cname),
+ entry.type)
+ ubcm_entry.is_cfunction = 1
+ ubcm_entry.func_cname = entry.func_cname
+ self.mutate_into_name_node(ubcm_entry)
+ self.analyse_rvalue_entry(env)
+ return 1
+ return 0
+
+ def analyse_as_extension_type(self, env):
+ # Try to interpret this as a reference to an extension type
+ # in a cimported module. Returns the extension type, or None.
+ module_scope = self.obj.analyse_as_module(env)
+ if module_scope:
+ entry = module_scope.lookup_here(self.attribute)
+ if entry and entry.is_type and entry.type.is_extension_type:
+ return entry.type
+ return None
+
+ def analyse_as_module(self, env):
+ # Try to interpret this as a reference to a cimported module
+ # in another cimported module. Returns the module scope, or None.
+ module_scope = self.obj.analyse_as_module(env)
+ if module_scope:
+ entry = module_scope.lookup_here(self.attribute)
+ if entry and entry.as_module:
+ return entry.as_module
+ return None
+
+ def mutate_into_name_node(self, entry):
+ # Turn this node into a NameNode with the given entry.
+ self.__class__ = NameNode
+ self.name = self.attribute
+ self.entry = entry
+ del self.obj
+ del self.attribute
+
+ def analyse_as_ordinary_attribute(self, env, target):
+ self.obj.analyse_types(env)
+ self.analyse_attribute(env)
+ if self.entry and self.entry.is_cmethod and not self.is_called:
+ error(self.pos, "C method can only be called")
+ if self.is_py_attr:
+ if not target:
+ self.is_temp = 1
+ self.result_ctype = py_object_type
+
+ def analyse_attribute(self, env):
+ # Look up attribute and set self.type and self.member.
+ self.is_py_attr = 0
+ self.member = self.attribute
+ if self.obj.type.is_string:
+ self.obj = self.obj.coerce_to_pyobject(env)
+ obj_type = self.obj.type
+ if obj_type.is_ptr:
+ obj_type = obj_type.base_type
+ self.op = "->"
+ elif obj_type.is_extension_type:
+ self.op = "->"
+ else:
+ self.op = "."
+ if obj_type.has_attributes:
+ entry = None
+ if obj_type.attributes_known():
+ entry = obj_type.scope.lookup_here(self.attribute)
+ else:
+ error(self.pos,
+ "Cannot select attribute of incomplete type '%s'"
+ % obj_type)
+ obj_type = PyrexTypes.error_type
+ self.entry = entry
+ if entry:
+ if obj_type.is_extension_type and entry.name == "__weakref__":
+ error(self.pos, "Illegal use of special attribute __weakref__")
+ if entry.is_variable or entry.is_cmethod:
+ self.type = entry.type
+ self.member = entry.cname
+ return
+ if entry.is_builtin_method and self.is_called:
+ # Mutate into NameNode referring to C function
+ #print "AttributeNode: Mutating builtin method into NameNode" ###
+ self.type = entry.type
+ self.__class__ = NameNode
+ return
+ else:
+ # If it's not a variable or C method, it must be a Python
+ # method of an extension type, so we treat it like a Python
+ # attribute.
+ pass
+ # If we get here, the base object is not a struct/union/extension
+ # type, or it is an extension type and the attribute is either not
+ # declared or is declared as a Python method. Treat it as a Python
+ # attribute reference.
+ if obj_type.is_pyobject:
+ self.type = py_object_type
+ self.is_py_attr = 1
+ #self.interned_attr_cname = env.intern(self.attribute)
+ self.gil_check(env)
+ else:
+ if not obj_type.is_error:
+ error(self.pos,
+ "Object of type '%s' has no attribute '%s'" %
+ (obj_type, self.attribute))
+
+ gil_message = "Accessing Python attribute"
+
+ def is_simple(self):
+ if self.obj:
+ return self.result_in_temp() or self.obj.is_simple()
+ else:
+ return NameNode.is_simple(self)
+
+ def is_lvalue(self):
+ if self.obj:
+ return 1
+ else:
+ return NameNode.is_lvalue(self)
+
+ def is_inplace_lvalue(self):
+ return self.is_lvalue()
+
+ def is_ephemeral(self):
+ if self.obj:
+ return self.obj.is_ephemeral()
+ else:
+ return NameNode.is_ephemeral(self)
+
+ def calculate_result_code(self):
+ obj = self.obj
+ obj_code = obj.result_as(obj.type)
+ if self.entry and self.entry.is_cmethod:
+ return "((struct %s *)%s%s%s)->%s" % (
+ obj.type.vtabstruct_cname, obj_code, self.op,
+ obj.type.vtabslot_cname, self.member)
+ else:
+ return "%s%s%s" % (obj_code, self.op, self.member)
+
+ def generate_result_code(self, code):
+ if self.is_py_attr:
+ result = self.result()
+ cname = code.intern(self.attribute)
+ code.putln(
+ '%s = PyObject_GetAttr(%s, %s); if (!%s) %s' % (
+ result,
+ self.obj.py_result(),
+ cname,
+ result,
+ code.error_goto(self.pos)))
+
+ def generate_setattr_code(self, value_code, code):
+ cname = code.intern(self.attribute)
+ code.putln(
+ 'if (PyObject_SetAttr(%s, %s, %s) < 0) %s' % (
+ self.obj.py_result(),
+ cname,
+ value_code,
+ code.error_goto(self.pos)))
+
+ def generate_assignment_code(self, rhs, code):
+ self.obj.generate_evaluation_code(code)
+ if self.is_py_attr:
+ self.generate_setattr_code(rhs.py_result(), code)
+ rhs.generate_disposal_code(code)
+ else:
+ select_code = self.result()
+ if self.type.is_pyobject:
+ rhs.make_owned_reference(code)
+ code.put_decref(select_code, self.ctype())
+ code.putln(
+ "%s = %s;" % (
+ select_code,
+ rhs.result_as(self.ctype())))
+ rhs.generate_post_assignment_code(code)
+ self.obj.generate_disposal_code(code)
+
+ def generate_inplace_assignment_code(self, operator, rhs, code):
+ self.obj.generate_evaluation_code(code)
+ select_code = self.result()
+ if self.type.is_pyobject:
+ self.generate_result_code(code)
+ self.generate_inplace_operation_code(operator, rhs, code)
+ if self.is_py_attr:
+ self.generate_setattr_code(self.inplace_result, code)
+ self.generate_inplace_result_disposal_code(code)
+ else:
+ code.put_decref(select_code, self.ctype())
+ cast_inplace_result = typecast(self.ctype(), py_object_type, self.inplace_result)
+ code.putln("%s = %s;" % (select_code, cast_inplace_result))
+ else:
+ code.putln("%s %s %s;" % (select_code, operator, rhs.result()))
+ rhs.generate_disposal_code(code)
+ self.obj.generate_disposal_code(code)
+
+ def generate_deletion_code(self, code):
+ self.obj.generate_evaluation_code(code)
+ if self.is_py_attr:
+ cname = code.intern(self.attribute)
+ code.putln(
+ 'if (PyObject_DelAttr(%s, %s) < 0) %s' % (
+ self.obj.py_result(),
+ cname,
+ code.error_goto(self.pos)))
+ else:
+ error(self.pos, "Cannot delete C attribute of extension type")
+ self.obj.generate_disposal_code(code)
+
+#-------------------------------------------------------------------
+#
+# Constructor nodes
+#
+#-------------------------------------------------------------------
+
+class SequenceNode(ExprNode):
+ # Base class for list and tuple constructor nodes.
+ # Contains common code for performing sequence unpacking.
+ #
+ # args [ExprNode]
+ # iterator ExprNode
+ # unpacked_items [ExprNode] or None
+ # coerced_unpacked_items [ExprNode] or None
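+ #
+ # Unpacking assignment iterates the right-hand side with
+ # PyObject_GetIter, fetches each target's value with __Pyx_UnpackItem,
+ # and finishes with __Pyx_EndUnpack.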
+
+ subexprs = ['args']
+
+ is_sequence_constructor = 1
+ unpacked_items = None
+
+ def compile_time_value_list(self, denv):
+ return [arg.compile_time_value(denv) for arg in self.args]
+
+ def analyse_target_declaration(self, env):
+ for arg in self.args:
+ arg.analyse_target_declaration(env)
+
+ def analyse_types(self, env):
+ for i in range(len(self.args)):
+ arg = self.args[i]
+ arg.analyse_types(env)
+ self.args[i] = arg.coerce_to_pyobject(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ def analyse_target_types(self, env):
+ self.iterator = PyTempNode(self.pos, env)
+ self.unpacked_items = []
+ self.coerced_unpacked_items = []
+ for arg in self.args:
+ arg.analyse_target_types(env)
+ unpacked_item = PyTempNode(self.pos, env)
+ coerced_unpacked_item = unpacked_item.coerce_to(arg.type, env)
+ self.unpacked_items.append(unpacked_item)
+ self.coerced_unpacked_items.append(coerced_unpacked_item)
+ self.type = py_object_type
+# env.use_utility_code(unpacking_utility_code)
+
+ def allocate_target_temps(self, env, rhs):
+ self.iterator.allocate_temps(env)
+ if rhs:
+ rhs.release_temp(env)
+ for arg, node in zip(self.args, self.coerced_unpacked_items):
+ node.allocate_temps(env)
+ arg.allocate_target_temps(env, node)
+ #arg.release_target_temp(env)
+ #node.release_temp(env)
+ self.iterator.release_temp(env)
+
+# def release_target_temp(self, env):
+# #for arg in self.args:
+# # arg.release_target_temp(env)
+# #for node in self.coerced_unpacked_items:
+# # node.release_temp(env)
+# self.iterator.release_temp(env)
+
+ def generate_result_code(self, code):
+ self.generate_operation_code(code)
+
+ def generate_assignment_code(self, rhs, code):
+ iter_result = self.iterator.result()
+ code.putln(
+ "%s = PyObject_GetIter(%s); if (!%s) %s" % (
+ iter_result,
+ rhs.py_result(),
+ iter_result,
+ code.error_goto(self.pos)))
+ rhs.generate_disposal_code(code)
+ for i in range(len(self.args)):
+ item = self.unpacked_items[i]
+ code.use_utility_code(unpacking_utility_code)
+ unpack_code = "__Pyx_UnpackItem(%s)" % (
+ self.iterator.py_result())
+ item_result = item.result()
+ code.putln(
+ "%s = %s; if (!%s) %s" % (
+ item_result,
+ typecast(item.ctype(), py_object_type, unpack_code),
+ item_result,
+ code.error_goto(self.pos)))
+ value_node = self.coerced_unpacked_items[i]
+ value_node.generate_evaluation_code(code)
+ self.args[i].generate_assignment_code(value_node, code)
+ code.putln(
+ "if (__Pyx_EndUnpack(%s) < 0) %s" % (
+ self.iterator.py_result(),
+ code.error_goto(self.pos)))
+ if debug_disposal_code:
+ print "UnpackNode.generate_assignment_code:"
+ print "...generating disposal code for", rhs
+ self.iterator.generate_disposal_code(code)
+
+
+class TupleNode(SequenceNode):
+ # Tuple constructor.
+
+ gil_message = "Constructing Python tuple"
+
+ def compile_time_value(self, denv):
+ values = self.compile_time_value_list(denv)
+ try:
+ return tuple(values)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def generate_operation_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PyTuple_New(%s); if (!%s) %s" % (
+ result,
+ len(self.args),
+ result,
+ code.error_goto(self.pos)))
+ for i in range(len(self.args)):
+ arg = self.args[i]
+ arg_result = arg.py_result()
+ # ??? Change this to use make_owned_reference?
+ if not arg.result_in_temp():
+ code.put_incref(arg_result)
+ code.putln(
+ "PyTuple_SET_ITEM(%s, %s, %s);" % (
+ result,
+ i,
+ arg_result))
+
+ def generate_subexpr_disposal_code(self, code):
+ # We call generate_post_assignment_code here instead
+ # of generate_disposal_code, because values were stored
+ # in the tuple using a reference-stealing operation.
+ for arg in self.args:
+ arg.generate_post_assignment_code(code)
+
+
+class ListNode(SequenceNode):
+ # List constructor.
+
+ gil_message = "Constructing Python list"
+
+ def compile_time_value(self, denv):
+ return self.compile_time_value_list(denv)
+
+ def generate_operation_code(self, code):
+ result = self.result()
+ code.putln("%s = PyList_New(%s); if (!%s) %s" %
+ (result,
+ len(self.args),
+ result,
+ code.error_goto(self.pos)))
+ for i in range(len(self.args)):
+ arg = self.args[i]
+ arg_result = arg.py_result()
+ #if not arg.is_temp:
+ if not arg.result_in_temp():
+ code.put_incref(arg_result)
+ code.putln("PyList_SET_ITEM(%s, %s, %s);" %
+ (result,
+ i,
+ arg_result))
+
+ def generate_subexpr_disposal_code(self, code):
+ # We call generate_post_assignment_code here instead
+ # of generate_disposal_code, because values were stored
+ # in the list using a reference-stealing operation.
+ for arg in self.args:
+ arg.generate_post_assignment_code(code)
+
+
+class DictNode(ExprNode):
+ # Dictionary constructor.
+ #
+ # key_value_pairs [(ExprNode, ExprNode)]
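+ #
+ # Emits PyDict_New() followed by one PyDict_SetItem call per pair;
+ # the pairs are evaluated and disposed of one at a time, which is why
+ # allocate_temps and generate_evaluation_code are overridden here.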
+
+ def compile_time_value(self, denv):
+ pairs = [(key.compile_time_value(denv), value.compile_time_value(denv))
+ for (key, value) in self.key_value_pairs]
+ try:
+ return dict(pairs)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_types(self, env):
+ new_pairs = []
+ for key, value in self.key_value_pairs:
+ key.analyse_types(env)
+ value.analyse_types(env)
+ key = key.coerce_to_pyobject(env)
+ value = value.coerce_to_pyobject(env)
+ new_pairs.append((key, value))
+ self.key_value_pairs = new_pairs
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Constructing Python dict"
+
+ def allocate_temps(self, env, result = None):
+ # Custom method used here because key-value
+ # pairs are evaluated and used one at a time.
+ self.allocate_temp(env, result)
+ for key, value in self.key_value_pairs:
+ key.allocate_temps(env)
+ value.allocate_temps(env)
+ key.release_temp(env)
+ value.release_temp(env)
+
+ def generate_evaluation_code(self, code):
+ # Custom method used here because key-value
+ # pairs are evaluated and used one at a time.
+ result = self.result()
+ code.putln(
+ "%s = PyDict_New(); if (!%s) %s" % (
+ result,
+ result,
+ code.error_goto(self.pos)))
+ for key, value in self.key_value_pairs:
+ key.generate_evaluation_code(code)
+ value.generate_evaluation_code(code)
+ code.putln(
+ "if (PyDict_SetItem(%s, %s, %s) < 0) %s" % (
+ result,
+ key.py_result(),
+ value.py_result(),
+ code.error_goto(self.pos)))
+ key.generate_disposal_code(code)
+ value.generate_disposal_code(code)
+
+
+class ClassNode(ExprNode):
+ # Helper class used in the implementation of Python
+ # class definitions. Constructs a class object given
+ # a name, tuple of bases and class dictionary.
+ #
+ # name ExprNode Name of the class
+ # bases ExprNode Base class tuple
+ # dict ExprNode Class dict (not owned by this node)
+ # doc ExprNode or None Doc string
+ # module_name string Name of defining module
+
+ subexprs = ['name', 'bases', 'doc']
+
+ def analyse_types(self, env):
+ self.name.analyse_types(env)
+ self.name = self.name.coerce_to_pyobject(env)
+ self.bases.analyse_types(env)
+ if self.doc:
+ self.doc.analyse_types(env)
+ self.doc = self.doc.coerce_to_pyobject(env)
+ self.module_name = env.global_scope().qualified_name
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+# env.use_utility_code(create_class_utility_code)
+
+ gil_message = "Constructing Python class"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ if self.doc:
+ code.putln(
+ 'if (PyDict_SetItemString(%s, "__doc__", %s) < 0) %s' % (
+ self.dict.py_result(),
+ self.doc.py_result(),
+ code.error_goto(self.pos)))
+ code.use_utility_code(create_class_utility_code)
+ code.putln(
+ '%s = __Pyx_CreateClass(%s, %s, %s, "%s"); if (!%s) %s' % (
+ result,
+ self.bases.py_result(),
+ self.dict.py_result(),
+ self.name.py_result(),
+ self.module_name,
+ result,
+ code.error_goto(self.pos)))
+
+
+class UnboundMethodNode(ExprNode):
+ # Helper class used in the implementation of Python
+ # class definitions. Constructs an unbound method
+ # object from a class and a function.
+ #
+ # class_cname string C var holding the class object
+ # function ExprNode Function object
+
+ subexprs = ['function']
+
+ def analyse_types(self, env):
+ self.function.analyse_types(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Constructing an unbound method"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PyMethod_New(%s, 0, %s); if (!%s) %s" % (
+ result,
+ self.function.py_result(),
+ self.class_cname,
+ result,
+ code.error_goto(self.pos)))
+
+
+class PyCFunctionNode(AtomicExprNode):
+ # Helper class used in the implementation of Python
+ # class definitions. Constructs a PyCFunction object
+ # from a PyMethodDef struct.
+ #
+ # pymethdef_cname string PyMethodDef structure
+ # module_name string Name of defining module
+
+ def analyse_types(self, env):
+ self.type = py_object_type
+ self.module_name = env.global_scope().module_name
+ self.gil_check(env)
+ self.is_temp = 1
+
+ gil_message = "Constructing Python function"
+
+ def generate_result_code(self, code):
+ result = self.result()
+ code.putln(
+ "%s = PyCFunction_NewEx(&%s, 0, %s); if (!%s) %s" % (
+ result,
+ self.pymethdef_cname,
+ code.get_py_string_const(self.module_name),
+ result,
+ code.error_goto(self.pos)))
+
+#-------------------------------------------------------------------
+#
+# Unary operator nodes
+#
+#-------------------------------------------------------------------
+
+compile_time_unary_operators = {
+ 'not': operator.not_,
+ '~': operator.inv,
+ '-': operator.neg,
+ '+': operator.pos,
+}
+
+class UnopNode(ExprNode):
+ # operator string
+ # operand ExprNode
+ #
+ # Processing during analyse_expressions phase:
+ #
+ # analyse_c_operation
+ # Called when the operand is not a pyobject.
+ # - Check operand type and coerce if needed.
+ # - Determine result type and result code fragment.
+ # - Allocate temporary for result if needed.
+
+ subexprs = ['operand']
+
+ def compile_time_value(self, denv):
+ func = compile_time_unary_operators.get(self.operator)
+ if not func:
+ error(self.pos,
+ "Unary '%s' not supported in compile-time expression"
+ % self.operator)
+ operand = self.operand.compile_time_value(denv)
+ try:
+ return func(operand)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_types(self, env):
+ self.operand.analyse_types(env)
+ if self.is_py_operation():
+ self.coerce_operand_to_pyobject(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+ else:
+ self.analyse_c_operation(env)
+
+ def check_const(self):
+ self.operand.check_const()
+
+ def is_py_operation(self):
+ return self.operand.type.is_pyobject
+
+ def coerce_operand_to_pyobject(self, env):
+ self.operand = self.operand.coerce_to_pyobject(env)
+
+ def generate_result_code(self, code):
+ if self.operand.type.is_pyobject:
+ self.generate_py_operation_code(code)
+ else:
+ if self.is_temp:
+ self.generate_c_operation_code(code)
+
+ def generate_py_operation_code(self, code):
+ function = self.py_operation_function()
+ result = self.result()
+ code.putln(
+ "%s = %s(%s); if (!%s) %s" % (
+ result,
+ function,
+ self.operand.py_result(),
+ result,
+ code.error_goto(self.pos)))
+
+ def type_error(self):
+ if not self.operand.type.is_error:
+ error(self.pos, "Invalid operand type for '%s' (%s)" %
+ (self.operator, self.operand.type))
+ self.type = PyrexTypes.error_type
+
+
+class NotNode(ExprNode):
+ # 'not' operator
+ #
+ # operand ExprNode
+
+ def compile_time_value(self, denv):
+ operand = self.operand.compile_time_value(denv)
+ try:
+ return not operand
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ subexprs = ['operand']
+
+ def analyse_types(self, env):
+ self.operand.analyse_types(env)
+ self.operand = self.operand.coerce_to_boolean(env)
+ self.type = PyrexTypes.c_int_type
+
+ def calculate_result_code(self):
+ return "(!%s)" % self.operand.result()
+
+ def generate_result_code(self, code):
+ pass
+
+
+class UnaryPlusNode(UnopNode):
+ # unary '+' operator
+
+ operator = '+'
+
+ def analyse_c_operation(self, env):
+ self.type = self.operand.type
+
+ def py_operation_function(self):
+ return "PyNumber_Positive"
+
+ def calculate_result_code(self):
+ return self.operand.result()
+
+
+class UnaryMinusNode(UnopNode):
+ # unary '-' operator
+
+ operator = '-'
+
+ def analyse_c_operation(self, env):
+ if self.operand.type.is_numeric:
+ self.type = self.operand.type
+ else:
+ self.type_error()
+
+ def py_operation_function(self):
+ return "PyNumber_Negative"
+
+ def calculate_result_code(self):
+ return "(-%s)" % self.operand.result()
+
+
+class TildeNode(UnopNode):
+ # unary '~' operator
+
+ def analyse_c_operation(self, env):
+ if self.operand.type.is_int:
+ self.type = self.operand.type
+ else:
+ self.type_error()
+
+ def py_operation_function(self):
+ return "PyNumber_Invert"
+
+ def calculate_result_code(self):
+ return "(~%s)" % self.operand.result()
+
+
+class AmpersandNode(ExprNode):
+ # The C address-of operator.
+ #
+ # operand ExprNode
+
+ subexprs = ['operand']
+
+ def analyse_types(self, env):
+ self.operand.analyse_types(env)
+ argtype = self.operand.type
+ if not (argtype.is_cfunction or self.operand.is_lvalue()):
+ self.error("Taking address of non-lvalue")
+ return
+ if argtype.is_pyobject:
+ self.error("Cannot take address of Python variable")
+ return
+ self.type = PyrexTypes.c_ptr_type(argtype)
+
+ def check_const(self):
+ self.operand.check_const_addr()
+
+ def error(self, mess):
+ error(self.pos, mess)
+ self.type = PyrexTypes.error_type
+ self.result_code = "<error>"
+
+ def calculate_result_code(self):
+ return "(&%s)" % self.operand.result()
+
+ def generate_result_code(self, code):
+ pass
+
+
+unop_node_classes = {
+ "+": UnaryPlusNode,
+ "-": UnaryMinusNode,
+ "~": TildeNode,
+}
+
+def unop_node(pos, operator, operand):
+ # Construct unop node of appropriate class for
+ # given operator.
+ return unop_node_classes[operator](pos,
+ operator = operator,
+ operand = operand)
+
+
+class TypecastNode(ExprNode):
+ # C type cast
+ #
+ # base_type CBaseTypeNode
+ # declarator CDeclaratorNode
+ # operand ExprNode
+
+ subexprs = ['operand']
+
+ def analyse_types(self, env):
+ base_type = self.base_type.analyse(env)
+ _, self.type = self.declarator.analyse(base_type, env)
+ if self.type.is_cfunction:
+ error(self.pos,
+ "Cannot cast to a function type")
+ self.type = PyrexTypes.error_type
+ self.operand.analyse_types(env)
+ to_py = self.type.is_pyobject
+ from_py = self.operand.type.is_pyobject
+ if from_py and not to_py and self.operand.is_ephemeral():
+ error(self.pos, "Casting temporary Python object to non-Python type")
+ # Must do the following, so that the result can be increfed without
+ # the operand getting evaluated twice.
+ if to_py and not from_py:
+ #self.result_ctype = py_object_type
+ #self.is_temp = 1
+ self.operand = self.operand.coerce_to_simple(env)
+
+ def check_const(self):
+ self.operand.check_const()
+
+ def calculate_result_code(self):
+ opnd = self.operand
+ result_code = self.type.cast_code(opnd.result())
+ return result_code
+
+ def result_as(self, type):
+ if not self.is_temp and type.is_pyobject and self.type.is_pyobject:
+ # Optimise away some unnecessary casting
+ return self.operand.result_as(type)
+ else:
+ return ExprNode.result_as(self, type)
+
+ def generate_result_code(self, code):
+ if self.is_temp:
+ code.putln(
+ "%s = %s;" % (
+ self.result(),
+ self.operand.py_result()))
+ code.put_incref(self.py_result())
+
+
+class SizeofNode(ExprNode):
+ # Base class for sizeof(x) expression nodes.
+ #
+ # sizeof_code string
+
+ subexprs = []
+
+ def check_const(self):
+ pass
+
+ def analyse_types(self, env):
+ self.analyse_argument(env)
+ self.type = PyrexTypes.c_size_t_type
+
+ def analyse_type_argument(self, arg_type):
+ if arg_type.is_pyobject:
+ error(self.pos, "Cannot take sizeof Python object")
+ elif arg_type.is_void:
+ error(self.pos, "Cannot take sizeof void")
+ elif not arg_type.is_complete():
+ error(self.pos, "Cannot take sizeof incomplete type '%s'" % arg_type)
+ arg_code = arg_type.declaration_code("")
+ self.sizeof_code = "(sizeof(%s))" % arg_code
+
+ def calculate_result_code(self):
+ return self.sizeof_code
+
+ def generate_result_code(self, code):
+ pass
+
+
+class SizeofTypeNode(SizeofNode):
+ # C sizeof function applied to a type
+ #
+ # base_type CBaseTypeNode
+ # declarator CDeclaratorNode
+
+ def analyse_argument(self, env):
+ base_type = self.base_type.analyse(env)
+ _, arg_type = self.declarator.analyse(base_type, env)
+ self.analyse_type_argument(arg_type)
+
+
+class SizeofVarNode(SizeofNode):
+ # C sizeof function applied to a variable or qualified name
+ # (which may actually refer to a type)
+ #
+ # operand ExprNode
+
+ #subexprs = ['operand']
+
+ def analyse_argument(self, env):
+ is_type = 0
+ operand = self.operand
+ if operand.analyse_as_cimported_attribute(env, allow_type = 1):
+ if operand.entry.is_type:
+ is_type = 1
+ self.analyse_type_argument(operand.entry.type)
+ else:
+ self.operand.analyse_types(env)
+ self.operand.mark_vars_used()
+ if not is_type:
+ self.sizeof_code = "(sizeof(%s))" % operand.result()
+
+
+#-------------------------------------------------------------------
+#
+# Binary operator nodes
+#
+#-------------------------------------------------------------------
+
+compile_time_binary_operators = {
+ '<': operator.lt,
+ '<=': operator.le,
+ '==': operator.eq,
+ '!=': operator.ne,
+ '>=': operator.ge,
+ '>': operator.gt,
+ 'is': operator.is_,
+ 'is_not': operator.is_not,
+ '+': operator.add,
+ '&': operator.and_,
+ '/': operator.div,
+ '//': operator.floordiv,
+ '<<': operator.lshift,
+ '%': operator.mod,
+ '*': operator.mul,
+ '|': operator.or_,
+ '**': operator.pow,
+ '>>': operator.rshift,
+ '-': operator.sub,
+ #'/': operator.truediv,
+ '^': operator.xor,
+ 'in': lambda x, y: x in y,
+ 'not_in': lambda x, y: x not in y,
+}
+
+def get_compile_time_binop(node):
+ func = compile_time_binary_operators.get(node.operator)
+ if not func:
+ error(node.pos,
+ "Binary '%s' not supported in compile-time expression"
+ % node.operator)
+ return func
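+
+# For example, a compile-time expression such as "3 * 4 + 1" is folded by
+# looking each operator up in the table above:
+#     compile_time_binary_operators['*'](3, 4)    # -> 12
+#     compile_time_binary_operators['+'](12, 1)   # -> 13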
+
+class BinopNode(ExprNode):
+ # operator string
+ # operand1 ExprNode
+ # operand2 ExprNode
+ #
+ # Processing during analyse_expressions phase:
+ #
+ # analyse_c_operation
+ # Called when neither operand is a pyobject.
+ # - Check operand types and coerce if needed.
+ # - Determine result type and result code fragment.
+ # - Allocate temporary for result if needed.
+
+ subexprs = ['operand1', 'operand2']
+
+ def compile_time_value(self, denv):
+ func = get_compile_time_binop(self)
+ operand1 = self.operand1.compile_time_value(denv)
+ operand2 = self.operand2.compile_time_value(denv)
+ try:
+ return func(operand1, operand2)
+ except Exception, e:
+ self.compile_time_value_error(e)
+
+ def analyse_types(self, env):
+ self.operand1.analyse_types(env)
+ self.operand2.analyse_types(env)
+ if self.is_py_operation():
+ self.coerce_operands_to_pyobjects(env)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+ else:
+ self.analyse_c_operation(env)
+
+ def is_py_operation(self):
+ return (self.operand1.type.is_pyobject
+ or self.operand2.type.is_pyobject)
+
+ def coerce_operands_to_pyobjects(self, env):
+ self.operand1 = self.operand1.coerce_to_pyobject(env)
+ self.operand2 = self.operand2.coerce_to_pyobject(env)
+
+ def check_const(self):
+ self.operand1.check_const()
+ self.operand2.check_const()
+
+ def generate_result_code(self, code):
+ #print "BinopNode.generate_result_code:", self.operand1, self.operand2 ###
+ if self.operand1.type.is_pyobject:
+ function = self.py_operation_function()
+ if function == "PyNumber_Power":
+ extra_args = ", Py_None"
+ else:
+ extra_args = ""
+ result = self.result()
+ code.putln(
+ "%s = %s(%s, %s%s); if (!%s) %s" % (
+ result,
+ function,
+ self.operand1.py_result(),
+ self.operand2.py_result(),
+ extra_args,
+ result,
+ code.error_goto(self.pos)))
+ else:
+ if self.is_temp:
+ self.generate_c_operation_code(code)
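+
+    # Illustrative sketch of the C emitted above for a Python-level binop
+    # (temp and label names are hypothetical):
+    #     __pyx_1 = PyNumber_Add(__pyx_v_a, __pyx_v_b); if (!__pyx_1) goto <error label>;
+    # and for "**" the extra Py_None argument is appended:
+    #     __pyx_1 = PyNumber_Power(__pyx_v_a, __pyx_v_b, Py_None); if (!__pyx_1) goto <error label>;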
+
+ def type_error(self):
+ if not (self.operand1.type.is_error
+ or self.operand2.type.is_error):
+ error(self.pos, "Invalid operand types for '%s' (%s; %s)" %
+ (self.operator, self.operand1.type,
+ self.operand2.type))
+ self.type = PyrexTypes.error_type
+
+
+class NumBinopNode(BinopNode):
+ # Binary operation taking numeric arguments.
+
+ def analyse_c_operation(self, env):
+ type1 = self.operand1.type
+ type2 = self.operand2.type
+ if self.operator == "**" and type1.is_int and type2.is_int:
+ error(self.pos, "** with two C int types is ambiguous")
+ self.type = error_type
+ return
+ self.type = self.compute_c_result_type(type1, type2)
+ if not self.type:
+ self.type_error()
+
+ def compute_c_result_type(self, type1, type2):
+ if self.c_types_okay(type1, type2):
+ return PyrexTypes.widest_numeric_type(type1, type2)
+ else:
+ return None
+
+ def c_types_okay(self, type1, type2):
+ #print "NumBinopNode.c_types_okay:", type1, type2 ###
+ return (type1.is_numeric or type1.is_enum) \
+ and (type2.is_numeric or type2.is_enum)
+
+ def calculate_result_code(self):
+ return "(%s %s %s)" % (
+ self.operand1.result(),
+ self.operator,
+ self.operand2.result())
+
+ def py_operation_function(self):
+ return self.py_functions[self.operator]
+
+ py_functions = {
+ "|": "PyNumber_Or",
+ "^": "PyNumber_Xor",
+ "&": "PyNumber_And",
+ "<<": "PyNumber_Lshift",
+ ">>": "PyNumber_Rshift",
+ "+": "PyNumber_Add",
+ "-": "PyNumber_Subtract",
+ "*": "PyNumber_Multiply",
+ "/": "PyNumber_Divide",
+ "%": "PyNumber_Remainder",
+ "**": "PyNumber_Power"
+ }
+
+
+class IntBinopNode(NumBinopNode):
+ # Binary operation taking integer arguments.
+
+ def c_types_okay(self, type1, type2):
+ #print "IntBinopNode.c_types_okay:", type1, type2 ###
+ return (type1.is_int or type1.is_enum) \
+ and (type2.is_int or type2.is_enum)
+
+
+class AddNode(NumBinopNode):
+ # '+' operator.
+
+ def is_py_operation(self):
+ if self.operand1.type.is_string \
+ and self.operand2.type.is_string:
+ return 1
+ else:
+ return NumBinopNode.is_py_operation(self)
+
+ def compute_c_result_type(self, type1, type2):
+ #print "AddNode.compute_c_result_type:", type1, self.operator, type2 ###
+ if (type1.is_ptr or type1.is_array) and (type2.is_int or type2.is_enum):
+ return type1
+ elif (type2.is_ptr or type2.is_array) and (type1.is_int or type1.is_enum):
+ return type2
+ else:
+ return NumBinopNode.compute_c_result_type(
+ self, type1, type2)
+
+
+class SubNode(NumBinopNode):
+ # '-' operator.
+
+ def compute_c_result_type(self, type1, type2):
+ if (type1.is_ptr or type1.is_array) and (type2.is_int or type2.is_enum):
+ return type1
+ elif (type1.is_ptr or type1.is_array) and (type2.is_ptr or type2.is_array):
+ return PyrexTypes.c_int_type
+ else:
+ return NumBinopNode.compute_c_result_type(
+ self, type1, type2)
+
+
+class MulNode(NumBinopNode):
+ # '*' operator.
+
+ def is_py_operation(self):
+ type1 = self.operand1.type
+ type2 = self.operand2.type
+ if (type1.is_string and type2.is_int) \
+ or (type2.is_string and type1.is_int):
+ return 1
+ else:
+ return NumBinopNode.is_py_operation(self)
+
+
+class ModNode(IntBinopNode):
+ # '%' operator.
+
+ def is_py_operation(self):
+ return (self.operand1.type.is_string
+ or self.operand2.type.is_string
+ or IntBinopNode.is_py_operation(self))
+
+
+class PowNode(NumBinopNode):
+ # '**' operator.
+
+ def analyse_types(self, env):
+ env.pow_function_used = 1
+ NumBinopNode.analyse_types(self, env)
+
+ def compute_c_result_type(self, type1, type2):
+ if self.c_types_okay(type1, type2):
+ return PyrexTypes.c_double_type
+ else:
+ return None
+
+ def calculate_result_code(self):
+ return "pow(%s, %s)" % (
+ self.operand1.result(), self.operand2.result())
+
+
+class BoolBinopNode(ExprNode):
+ # Short-circuiting boolean operation.
+ #
+ # operator string
+ # operand1 ExprNode
+ # operand2 ExprNode
+ # temp_bool ExprNode used internally
+
+ temp_bool = None
+
+ subexprs = ['operand1', 'operand2', 'temp_bool']
+
+ def compile_time_value(self, denv):
+ if self.operator == 'and':
+ return self.operand1.compile_time_value(denv) \
+ and self.operand2.compile_time_value(denv)
+ else:
+ return self.operand1.compile_time_value(denv) \
+ or self.operand2.compile_time_value(denv)
+
+ def analyse_types(self, env):
+ self.operand1.analyse_types(env)
+ self.operand2.analyse_types(env)
+ if self.operand1.type.is_pyobject or \
+ self.operand2.type.is_pyobject:
+ self.operand1 = self.operand1.coerce_to_pyobject(env)
+ self.operand2 = self.operand2.coerce_to_pyobject(env)
+ self.temp_bool = TempNode(self.pos,
+ PyrexTypes.c_int_type, env)
+ self.type = py_object_type
+ self.gil_check(env)
+ else:
+ self.operand1 = self.operand1.coerce_to_boolean(env)
+ self.operand2 = self.operand2.coerce_to_boolean(env)
+ self.type = PyrexTypes.c_int_type
+ # For what we're about to do, it's vital that
+ # both operands be temp nodes.
+ self.operand1 = self.operand1.coerce_to_temp(env) #CTT
+ self.operand2 = self.operand2.coerce_to_temp(env)
+ self.is_temp = 1
+
+ gil_message = "Truth-testing Python object"
+
+ def allocate_temps(self, env, result_code = None):
+ # We don't need both operands at the same time, and
+ # one of the operands will also be our result. So we
+ # use an allocation strategy here which results in
+ # this node and both its operands sharing the same
+ # result variable. This allows us to avoid some
+ # assignments and increfs/decrefs that would otherwise
+ # be necessary.
+ self.allocate_temp(env, result_code)
+ self.operand1.allocate_temps(env, self.result_code)
+ if self.temp_bool:
+ self.temp_bool.allocate_temp(env)
+ self.temp_bool.release_temp(env)
+ self.operand2.allocate_temps(env, self.result_code)
+ # We haven't called release_temp on either operand,
+ # because although they are temp nodes, they don't own
+ # their result variable. And because they are temp
+ # nodes, any temps in their subnodes will have been
+ # released before their allocate_temps returned.
+ # Therefore, they contain no temp vars that need to
+ # be released.
+
+ def check_const(self):
+ self.operand1.check_const()
+ self.operand2.check_const()
+
+ def calculate_result_code(self):
+ return "(%s %s %s)" % (
+ self.operand1.result(),
+ self.py_to_c_op[self.operator],
+ self.operand2.result())
+
+ py_to_c_op = {'and': "&&", 'or': "||"}
+
+ def generate_evaluation_code(self, code):
+ self.operand1.generate_evaluation_code(code)
+ test_result = self.generate_operand1_test(code)
+ if self.operator == 'and':
+ sense = ""
+ else:
+ sense = "!"
+ code.putln(
+ "if (%s%s) {" % (
+ sense,
+ test_result))
+ self.operand1.generate_disposal_code(code)
+ self.operand2.generate_evaluation_code(code)
+ code.putln(
+ "}")
+
+ def generate_operand1_test(self, code):
+ # Generate code to test the truth of the first operand.
+ if self.type.is_pyobject:
+ test_result = self.temp_bool.result()
+ code.putln(
+ "%s = PyObject_IsTrue(%s); if (%s < 0) %s" % (
+ test_result,
+ self.operand1.py_result(),
+ test_result,
+ code.error_goto(self.pos)))
+ else:
+ test_result = self.operand1.result()
+ return test_result
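+
+    # Illustrative sketch (hypothetical temp names): because both operands
+    # share this node's result variable (see allocate_temps above), "a and b"
+    # with C int operands comes out roughly as
+    #     __pyx_1 = __pyx_v_a;
+    #     if (__pyx_1) {
+    #         __pyx_1 = __pyx_v_b;
+    #     }
+    # For 'or' the test is negated, and for Python objects the truth of the
+    # first operand is obtained via PyObject_IsTrue() into temp_bool.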
+
+
+class CmpNode:
+ # Mixin class containing code common to PrimaryCmpNodes
+ # and CascadedCmpNodes.
+
+ def cascaded_compile_time_value(self, operand1, denv):
+ func = get_compile_time_binop(self)
+ operand2 = self.operand2.compile_time_value(denv)
+ try:
+ result = func(operand1, operand2)
+ except Exception, e:
+ self.compile_time_value_error(e)
+ result = None
+ if result:
+ cascade = self.cascade
+ if cascade:
+                result = result and cascade.cascaded_compile_time_value(operand2, denv)
+ return result
+
+ def is_python_comparison(self):
+ return (self.has_python_operands()
+ or (self.cascade and self.cascade.is_python_comparison())
+ or self.operator in ('in', 'not_in'))
+
+ def check_types(self, env, operand1, op, operand2):
+ if not self.types_okay(operand1, op, operand2):
+ error(self.pos, "Invalid types for '%s' (%s, %s)" %
+ (self.operator, operand1.type, operand2.type))
+
+ def types_okay(self, operand1, op, operand2):
+ type1 = operand1.type
+ type2 = operand2.type
+ if type1.is_error or type2.is_error:
+ return 1
+ if type1.is_pyobject: # type2 will be, too
+ return 1
+ elif type1.is_ptr or type1.is_array:
+ return type1.is_null_ptr or type2.is_null_ptr \
+ or ((type2.is_ptr or type2.is_array)
+ and type1.base_type.same_as(type2.base_type))
+ elif ((type1.is_numeric and type2.is_numeric
+ or type1.is_enum and (type2.is_int or type1.same_as(type2))
+ or type1.is_int and type2.is_enum)
+ and op not in ('is', 'is_not')):
+ return 1
+ else:
+ return 0
+
+ def generate_operation_code(self, code, result,
+            operand1, op, operand2):
+ if op == 'in' or op == 'not_in':
+ code.putln(
+ "%s = PySequence_Contains(%s, %s); if (%s < 0) %s" % (
+ result,
+ operand2.py_result(),
+ operand1.py_result(),
+ result,
+ code.error_goto(self.pos)))
+ if op == 'not_in':
+ code.putln(
+ "%s = !%s;" % (
+ result, result))
+ elif (operand1.type.is_pyobject
+ and op not in ('is', 'is_not')):
+ code.putln(
+ "if (PyObject_Cmp(%s, %s, &%s) < 0) %s" % (
+ operand1.py_result(),
+ operand2.py_result(),
+ result,
+ code.error_goto(self.pos)))
+ code.putln(
+ "%s = %s %s 0;" % (
+ result, result, op))
+ else:
+ type1 = operand1.type
+ type2 = operand2.type
+ if (type1.is_extension_type or type2.is_extension_type) \
+ and not operand1.ctype().same_as(operand2.ctype()):
+ code1 = operand1.result_as(py_object_type)
+ code2 = operand2.result_as(py_object_type)
+ else:
+ code1 = operand1.result()
+ code2 = operand2.result()
+ code.putln("%s = %s %s %s;" % (
+ result,
+ code1,
+ self.c_operator(op),
+ code2))
+
+ def c_operator(self, op):
+ if op == 'is':
+ return "=="
+ elif op == 'is_not':
+ return "!="
+ else:
+ return op
+
+
+class PrimaryCmpNode(ExprNode, CmpNode):
+ # Non-cascaded comparison or first comparison of
+ # a cascaded sequence.
+ #
+ # operator string
+ # operand1 ExprNode
+ # operand2 ExprNode
+ # cascade CascadedCmpNode
+
+ # We don't use the subexprs mechanism, because
+ # things here are too complicated for it to handle.
+ # Instead, we override all the framework methods
+ # which use it.
+
+ cascade = None
+
+ def compile_time_value(self, denv):
+ operand1 = self.operand1.compile_time_value(denv)
+ return self.cascaded_compile_time_value(operand1, denv)
+
+ def analyse_types(self, env):
+ self.operand1.analyse_types(env)
+ self.operand2.analyse_types(env)
+ if self.cascade:
+ self.cascade.analyse_types(env, self.operand2)
+ self.is_pycmp = self.is_python_comparison()
+ if self.is_pycmp:
+ self.coerce_operands_to_pyobjects(env)
+ if self.cascade:
+ self.operand2 = self.operand2.coerce_to_simple(env)
+ self.cascade.coerce_cascaded_operands_to_temp(env)
+ self.check_operand_types(env)
+ self.type = PyrexTypes.c_int_type
+ if self.is_pycmp or self.cascade:
+ self.is_temp = 1
+
+ def check_operand_types(self, env):
+ self.check_types(env,
+ self.operand1, self.operator, self.operand2)
+ if self.cascade:
+ self.cascade.check_operand_types(env, self.operand2)
+
+ def has_python_operands(self):
+ return (self.operand1.type.is_pyobject
+ or self.operand2.type.is_pyobject)
+
+ def coerce_operands_to_pyobjects(self, env):
+ self.operand1 = self.operand1.coerce_to_pyobject(env)
+ self.operand2 = self.operand2.coerce_to_pyobject(env)
+ if self.cascade:
+ self.cascade.coerce_operands_to_pyobjects(env)
+
+ def allocate_subexpr_temps(self, env):
+ self.operand1.allocate_temps(env)
+ self.operand2.allocate_temps(env)
+ if self.cascade:
+ self.cascade.allocate_subexpr_temps(env)
+
+ def release_subexpr_temps(self, env):
+ self.operand1.release_temp(env)
+ self.operand2.release_temp(env)
+ if self.cascade:
+ self.cascade.release_subexpr_temps(env)
+
+ def check_const(self):
+ self.operand1.check_const()
+ self.operand2.check_const()
+ if self.cascade:
+ self.not_const()
+
+ def calculate_result_code(self):
+ return "(%s %s %s)" % (
+ self.operand1.result(),
+ self.c_operator(self.operator),
+ self.operand2.result())
+
+ def generate_evaluation_code(self, code):
+ self.operand1.generate_evaluation_code(code)
+ self.operand2.generate_evaluation_code(code)
+ if self.is_temp:
+ result = self.result()
+ self.generate_operation_code(code, result,
+ self.operand1, self.operator, self.operand2)
+ if self.cascade:
+ self.cascade.generate_evaluation_code(code,
+ result, self.operand2)
+ self.operand1.generate_disposal_code(code)
+ self.operand2.generate_disposal_code(code)
+
+ def generate_subexpr_disposal_code(self, code):
+ # If this is called, it is a non-cascaded cmp,
+ # so only need to dispose of the two main operands.
+ self.operand1.generate_disposal_code(code)
+ self.operand2.generate_disposal_code(code)
+
+
+class CascadedCmpNode(Node, CmpNode):
+ # A CascadedCmpNode is not a complete expression node. It
+ # hangs off the side of another comparison node, shares
+ # its left operand with that node, and shares its result
+ # with the PrimaryCmpNode at the head of the chain.
+ #
+ # operator string
+ # operand2 ExprNode
+ # cascade CascadedCmpNode
+
+ cascade = None
+
+ def analyse_types(self, env, operand1):
+ self.operand2.analyse_types(env)
+ if self.cascade:
+ self.cascade.analyse_types(env, self.operand2)
+
+ def check_operand_types(self, env, operand1):
+ self.check_types(env,
+ operand1, self.operator, self.operand2)
+ if self.cascade:
+ self.cascade.check_operand_types(env, self.operand2)
+
+ def has_python_operands(self):
+ return self.operand2.type.is_pyobject
+
+ def coerce_operands_to_pyobjects(self, env):
+ self.operand2 = self.operand2.coerce_to_pyobject(env)
+ if self.cascade:
+ self.cascade.coerce_operands_to_pyobjects(env)
+
+ def coerce_cascaded_operands_to_temp(self, env):
+ if self.cascade:
+ #self.operand2 = self.operand2.coerce_to_temp(env) #CTT
+ self.operand2 = self.operand2.coerce_to_simple(env)
+ self.cascade.coerce_cascaded_operands_to_temp(env)
+
+ def allocate_subexpr_temps(self, env):
+ self.operand2.allocate_temps(env)
+ if self.cascade:
+ self.cascade.allocate_subexpr_temps(env)
+
+ def release_subexpr_temps(self, env):
+ self.operand2.release_temp(env)
+ if self.cascade:
+ self.cascade.release_subexpr_temps(env)
+
+ def generate_evaluation_code(self, code, result, operand1):
+ code.putln("if (%s) {" % result)
+ self.operand2.generate_evaluation_code(code)
+ self.generate_operation_code(code, result,
+ operand1, self.operator, self.operand2)
+ if self.cascade:
+ self.cascade.generate_evaluation_code(
+ code, result, self.operand2)
+ # Cascaded cmp result is always temp
+ self.operand2.generate_disposal_code(code)
+ code.putln("}")
+
+
+binop_node_classes = {
+ "or": BoolBinopNode,
+ "and": BoolBinopNode,
+ "|": IntBinopNode,
+ "^": IntBinopNode,
+ "&": IntBinopNode,
+ "<<": IntBinopNode,
+ ">>": IntBinopNode,
+ "+": AddNode,
+ "-": SubNode,
+ "*": MulNode,
+ "/": NumBinopNode,
+ "%": ModNode,
+ "**": PowNode
+}
+
+def binop_node(pos, operator, operand1, operand2):
+ # Construct binop node of appropriate class for
+ # given operator.
+ return binop_node_classes[operator](pos,
+ operator = operator,
+ operand1 = operand1,
+ operand2 = operand2)
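+
+# For example, the parser calls binop_node(pos, "+", e1, e2) to get an
+# AddNode, and binop_node(pos, "and", e1, e2) to get a BoolBinopNode.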
+
+#-------------------------------------------------------------------
+#
+# Coercion nodes
+#
+# Coercion nodes are special in that they are created during
+# the analyse_types phase of parse tree processing.
+# Their __init__ methods consequently incorporate some aspects
+# of that phase.
+#
+#-------------------------------------------------------------------
+
+class CoercionNode(ExprNode):
+ # Abstract base class for coercion nodes.
+ #
+ # arg ExprNode node being coerced
+
+ subexprs = ['arg']
+
+ def __init__(self, arg):
+ self.pos = arg.pos
+ self.arg = arg
+ if debug_coercion:
+ print self, "Coercing", self.arg
+
+
+class CastNode(CoercionNode):
+ # Wrap a node in a C type cast.
+
+ def __init__(self, arg, new_type):
+ CoercionNode.__init__(self, arg)
+ self.type = new_type
+
+ def calculate_result_code(self):
+ return self.arg.result_as(self.type)
+
+ def generate_result_code(self, code):
+ self.arg.generate_result_code(code)
+
+
+class PyTypeTestNode(CoercionNode):
+ # This node is used to check that a generic Python
+ # object is an instance of a particular extension type.
+ # This node borrows the result of its argument node.
+
+ def __init__(self, arg, dst_type, env):
+        # The arg is known to be a Python object, and
+ # the dst_type is known to be an extension type.
+ assert dst_type.is_extension_type, "PyTypeTest on non extension type"
+ CoercionNode.__init__(self, arg)
+ self.type = dst_type
+ self.result_ctype = arg.ctype()
+# env.use_utility_code(type_test_utility_code)
+ self.gil_check(env)
+
+ gil_message = "Python type test"
+
+ def result_in_temp(self):
+ return self.arg.result_in_temp()
+
+ def is_ephemeral(self):
+ return self.arg.is_ephemeral()
+
+ def calculate_result_code(self):
+ return self.arg.result()
+
+ def generate_result_code(self, code):
+ if self.type.typeobj_is_available():
+ code.use_utility_code(type_test_utility_code)
+ code.putln(
+ "if (!__Pyx_TypeTest(%s, %s)) %s" % (
+ self.arg.py_result(),
+ self.type.typeptr_cname,
+ code.error_goto(self.pos)))
+ else:
+ error(self.pos, "Cannot test type of extern C class "
+ "without type object name specification")
+
+ def generate_post_assignment_code(self, code):
+ self.arg.generate_post_assignment_code(code)
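+
+    # Illustrative sketch (type pointer and label names are hypothetical):
+    # assigning a generic Python object to a variable declared with an
+    # extension type "Spam" emits roughly
+    #     if (!__Pyx_TypeTest(__pyx_1, __pyx_ptype_Spam)) goto <error label>;
+    # using the __Pyx_TypeTest() helper defined near the end of this file.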
+
+
+class CoerceToPyTypeNode(CoercionNode):
+ # This node is used to convert a C data type
+ # to a Python object.
+
+ def __init__(self, arg, env):
+ CoercionNode.__init__(self, arg)
+ self.type = py_object_type
+ self.gil_check(env)
+ self.is_temp = 1
+ if not arg.type.to_py_function:
+ error(arg.pos,
+ "Cannot convert '%s' to Python object" % arg.type)
+
+ gil_message = "Converting to Python object"
+
+ def generate_result_code(self, code):
+ function = self.arg.type.to_py_function
+ result = self.result()
+ code.putln('%s = %s(%s); if (!%s) %s' % (
+ result,
+ function,
+ self.arg.result(),
+ result,
+ code.error_goto(self.pos)))
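+
+    # Illustrative sketch (assuming a C long whose to_py_function is
+    # PyInt_FromLong; temp and label names are hypothetical):
+    #     __pyx_1 = PyInt_FromLong(__pyx_v_n); if (!__pyx_1) goto <error label>;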
+
+
+class CoerceFromPyTypeNode(CoercionNode):
+ # This node is used to convert a Python object
+ # to a C data type.
+
+ def __init__(self, result_type, arg, env):
+ CoercionNode.__init__(self, arg)
+ self.type = result_type
+ self.is_temp = 1
+ if not result_type.from_py_function:
+ error(arg.pos,
+ "Cannot convert Python object to '%s'" % result_type)
+ if self.type.is_string and self.arg.is_ephemeral():
+ error(arg.pos,
+ "Obtaining char * from temporary Python value")
+
+ def generate_result_code(self, code):
+ function = self.type.from_py_function
+ operand = self.arg.py_result()
+ rhs = "%s(%s)" % (function, operand)
+ if self.type.is_enum:
+ rhs = typecast(self.type, c_long_type, rhs)
+ result = self.result()
+ if self.type.is_string:
+ err_code = "!%s" % result
+ else:
+ err_code = "PyErr_Occurred()"
+ code.putln('%s = %s; if (%s) %s' % (
+ result,
+ rhs,
+ err_code,
+ code.error_goto(self.pos)))
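+
+    # Illustrative sketch (assuming a C long whose from_py_function is
+    # PyInt_AsLong; temp and label names are hypothetical):
+    #     __pyx_v_n = PyInt_AsLong(__pyx_1); if (PyErr_Occurred()) goto <error label>;
+    # A char* conversion checks the result pointer instead, as selected by
+    # err_code above.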
+
+
+class CoerceToBooleanNode(CoercionNode):
+ # This node is used when a result needs to be used
+ # in a boolean context.
+
+ def __init__(self, arg, env):
+ CoercionNode.__init__(self, arg)
+ self.type = PyrexTypes.c_int_type
+ if arg.type.is_pyobject:
+ if env.nogil:
+ self.gil_error()
+ self.is_temp = 1
+
+ gil_message = "Truth-testing Python object"
+
+ def check_const(self):
+ if self.is_temp:
+ self.not_const()
+ self.arg.check_const()
+
+ def calculate_result_code(self):
+ return "(%s != 0)" % self.arg.result()
+
+ def generate_result_code(self, code):
+ if self.arg.type.is_pyobject:
+ result = self.result()
+ code.putln(
+ "%s = PyObject_IsTrue(%s); if (%s < 0) %s" % (
+ result,
+ self.arg.py_result(),
+ result,
+ code.error_goto(self.pos)))
+
+
+class CoerceToTempNode(CoercionNode):
+ # This node is used to force the result of another node
+ # to be stored in a temporary. It is only used if the
+ # argument node's result is not already in a temporary.
+
+ def __init__(self, arg, env):
+ CoercionNode.__init__(self, arg)
+ self.type = self.arg.type
+ self.is_temp = 1
+ if self.type.is_pyobject:
+ self.gil_check(env)
+ self.result_ctype = py_object_type
+
+ gil_message = "Creating temporary Python reference"
+
+ def generate_result_code(self, code):
+ #self.arg.generate_evaluation_code(code) # Already done
+ # by generic generate_subexpr_evaluation_code!
+ code.putln("%s = %s;" % (
+ self.result(), self.arg.result_as(self.ctype())))
+ if self.type.is_pyobject:
+ code.put_incref(self.py_result())
+
+
+class CloneNode(CoercionNode):
+ # This node is employed when the result of another node needs
+ # to be used multiple times. The argument node's result must
+ # be in a temporary. This node "borrows" the result from the
+ # argument node, and does not generate any evaluation or
+ # disposal code for it. The original owner of the argument
+ # node is responsible for doing those things.
+
+ subexprs = [] # Arg is not considered a subexpr
+
+ def __init__(self, arg):
+ CoercionNode.__init__(self, arg)
+ self.type = arg.type
+ self.result_ctype = arg.result_ctype
+
+ def calculate_result_code(self):
+ return self.arg.result()
+
+ def generate_evaluation_code(self, code):
+ pass
+
+ def generate_result_code(self, code):
+ pass
+
+#------------------------------------------------------------------------------------
+#
+# Runtime support code
+#
+#------------------------------------------------------------------------------------
+
+get_name_utility_code = [
+"""
+static PyObject *__Pyx_GetName(PyObject *dict, char *name); /*proto*/
+""","""
+static PyObject *__Pyx_GetName(PyObject *dict, char *name) {
+ PyObject *result;
+ result = PyObject_GetAttrString(dict, name);
+ if (!result)
+ PyErr_SetString(PyExc_NameError, name);
+ return result;
+}
+"""]
+
+get_name_interned_utility_code = [
+"""
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/
+""","""
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) {
+ PyObject *result;
+ result = PyObject_GetAttr(dict, name);
+ if (!result)
+ PyErr_SetObject(PyExc_NameError, name);
+ return result;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+import_utility_code = [
+"""
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list); /*proto*/
+""","""
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list) {
+ PyObject *__import__ = 0;
+ PyObject *empty_list = 0;
+ PyObject *module = 0;
+ PyObject *global_dict = 0;
+ PyObject *empty_dict = 0;
+ PyObject *list;
+ __import__ = PyObject_GetAttrString(%(BUILTINS)s, "__import__");
+ if (!__import__)
+ goto bad;
+ if (from_list)
+ list = from_list;
+ else {
+ empty_list = PyList_New(0);
+ if (!empty_list)
+ goto bad;
+ list = empty_list;
+ }
+ global_dict = PyModule_GetDict(%(GLOBALS)s);
+ if (!global_dict)
+ goto bad;
+ empty_dict = PyDict_New();
+ if (!empty_dict)
+ goto bad;
+ module = PyObject_CallFunction(__import__, "OOOO",
+ name, global_dict, empty_dict, list);
+bad:
+ Py_XDECREF(empty_list);
+ Py_XDECREF(__import__);
+ Py_XDECREF(empty_dict);
+ return module;
+}
+""" % {
+ "BUILTINS": Naming.builtins_cname,
+ "GLOBALS": Naming.module_cname,
+}]
+
+#------------------------------------------------------------------------------------
+#
+#get_exception_utility_code = [
+#"""
+#static PyObject *__Pyx_GetExcValue(void); /*proto*/
+#""","""
+#static PyObject *__Pyx_GetExcValue(void) {
+# PyObject *type = 0, *value = 0, *tb = 0;
+# PyObject *result = 0;
+# PyThreadState *tstate = PyThreadState_Get();
+# PyErr_Fetch(&type, &value, &tb);
+# PyErr_NormalizeException(&type, &value, &tb);
+# if (PyErr_Occurred())
+# goto bad;
+# if (!value) {
+# value = Py_None;
+# Py_INCREF(value);
+# }
+# Py_XDECREF(tstate->exc_type);
+# Py_XDECREF(tstate->exc_value);
+# Py_XDECREF(tstate->exc_traceback);
+# tstate->exc_type = type;
+# tstate->exc_value = value;
+# tstate->exc_traceback = tb;
+# result = value;
+# Py_XINCREF(result);
+# type = 0;
+# value = 0;
+# tb = 0;
+#bad:
+# Py_XDECREF(type);
+# Py_XDECREF(value);
+# Py_XDECREF(tb);
+# return result;
+#}
+#"""]
+#
+#------------------------------------------------------------------------------------
+
+unpacking_utility_code = [
+"""
+static PyObject *__Pyx_UnpackItem(PyObject *); /*proto*/
+static int __Pyx_EndUnpack(PyObject *); /*proto*/
+""","""
+static void __Pyx_UnpackError(void) {
+ PyErr_SetString(PyExc_ValueError, "unpack sequence of wrong size");
+}
+
+static PyObject *__Pyx_UnpackItem(PyObject *iter) {
+ PyObject *item;
+ if (!(item = PyIter_Next(iter))) {
+ if (!PyErr_Occurred())
+ __Pyx_UnpackError();
+ }
+ return item;
+}
+
+static int __Pyx_EndUnpack(PyObject *iter) {
+ PyObject *item;
+ if ((item = PyIter_Next(iter))) {
+ Py_DECREF(item);
+ __Pyx_UnpackError();
+ return -1;
+ }
+ else if (!PyErr_Occurred())
+ return 0;
+ else
+ return -1;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+type_test_utility_code = [
+"""
+static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/
+""","""
+static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) {
+ if (!type) {
+ PyErr_Format(PyExc_SystemError, "Missing type object");
+ return 0;
+ }
+ if (obj == Py_None || PyObject_TypeCheck(obj, type))
+ return 1;
+ PyErr_Format(PyExc_TypeError, "Cannot convert %s to %s",
+ obj->ob_type->tp_name, type->tp_name);
+ return 0;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+create_class_utility_code = [
+"""
+static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, char *modname); /*proto*/
+""","""
+static PyObject *__Pyx_CreateClass(
+ PyObject *bases, PyObject *dict, PyObject *name, char *modname)
+{
+ PyObject *py_modname;
+ PyObject *result = 0;
+
+ py_modname = PyString_FromString(modname);
+ if (!py_modname)
+ goto bad;
+ if (PyDict_SetItemString(dict, "__module__", py_modname) < 0)
+ goto bad;
+ result = PyClass_New(bases, dict, name);
+bad:
+ Py_XDECREF(py_modname);
+ return result;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+getitem_int_utility_code = [
+"""
+static PyObject *__Pyx_GetItemInt(PyObject *o, Py_ssize_t i); /*proto*/
+""","""
+static PyObject *__Pyx_GetItemInt(PyObject *o, Py_ssize_t i) {
+ PyTypeObject *t = o->ob_type;
+ PyObject *r;
+ if (t->tp_as_sequence && t->tp_as_sequence->sq_item)
+ r = PySequence_GetItem(o, i);
+ else {
+ PyObject *j = PyInt_FromLong(i);
+ if (!j)
+ return 0;
+ r = PyObject_GetItem(o, j);
+ Py_DECREF(j);
+ }
+ return r;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+setitem_int_utility_code = [
+"""
+static int __Pyx_SetItemInt(PyObject *o, Py_ssize_t i, PyObject *v); /*proto*/
+""","""
+static int __Pyx_SetItemInt(PyObject *o, Py_ssize_t i, PyObject *v) {
+ PyTypeObject *t = o->ob_type;
+ int r;
+ if (t->tp_as_sequence && t->tp_as_sequence->sq_item)
+ r = PySequence_SetItem(o, i, v);
+ else {
+ PyObject *j = PyInt_FromLong(i);
+ if (!j)
+ return -1;
+ r = PyObject_SetItem(o, j, v);
+ Py_DECREF(j);
+ }
+ return r;
+}
+"""]
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Filenames.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Filenames.py
new file mode 100644
index 00000000..09092e28
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Filenames.py
@@ -0,0 +1,9 @@
+#
+# Pyrex - Filename suffixes
+#
+
+cplus_suffix = ".cpp"
+pxd_suffixes = (".pxd",)
+pyx_suffixes = (".pyx", ".pyx+")
+package_init_files = ("__init__.py", "__init__.pyx", "__init__.pyx+")
+pyx_to_c_suffix = {".pyx": ".c", ".pyx+": cplus_suffix}
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.pickle b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.pickle
new file mode 100644
index 00000000..1631a72c
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.pickle
Binary files differ
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.py
new file mode 100644
index 00000000..ca303c0d
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Lexicon.py
@@ -0,0 +1,145 @@
+#
+# Pyrex Scanner - Lexical Definitions
+#
+# Changing anything in this file will cause Lexicon.pickle
+# to be rebuilt next time pyrexc is run.
+#
+
+string_prefixes = "cCrR"
+
+def make_lexicon():
+ from Pyrex.Plex import \
+ Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
+ TEXT, IGNORE, State, Lexicon
+ from Scanning import Method
+
+ letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
+ digit = Any("0123456789")
+ octdigit = Any("01234567")
+ hexdigit = Any("0123456789ABCDEFabcdef")
+ indentation = Bol + Rep(Any(" \t"))
+
+ decimal = Rep1(digit)
+ dot = Str(".")
+ exponent = Any("Ee") + Opt(Any("+-")) + decimal
+ decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)
+
+ name = letter + Rep(letter | digit)
+ intconst = decimal | (Str("0x") + Rep1(hexdigit))
+ longconst = intconst + Str("L")
+ fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
+ imagconst = (intconst | fltconst) + Any("jJ")
+
+# sq_string = (
+# Str("'") +
+# Rep(AnyBut("\\\n'") | (Str("\\") + AnyChar)) +
+# Str("'")
+# )
+#
+# dq_string = (
+# Str('"') +
+# Rep(AnyBut('\\\n"') | (Str("\\") + AnyChar)) +
+# Str('"')
+# )
+#
+# non_sq = AnyBut("'") | (Str('\\') + AnyChar)
+# tsq_string = (
+# Str("'''")
+# + Rep(non_sq | (Str("'") + non_sq) | (Str("''") + non_sq))
+# + Str("'''")
+# )
+#
+# non_dq = AnyBut('"') | (Str('\\') + AnyChar)
+# tdq_string = (
+# Str('"""')
+# + Rep(non_dq | (Str('"') + non_dq) | (Str('""') + non_dq))
+# + Str('"""')
+# )
+#
+# stringlit = Opt(Any(string_prefixes)) + (sq_string | dq_string | tsq_string| tdq_string)
+
+ beginstring = Opt(Any(string_prefixes)) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
+ two_oct = octdigit + octdigit
+ three_oct = octdigit + octdigit + octdigit
+ two_hex = hexdigit + hexdigit
+ escapeseq = Str("\\") + (two_oct | three_oct | two_hex | AnyChar)
+
+ bra = Any("([{")
+ ket = Any(")]}")
+ punct = Any(":,;+-*/|&<>=.%`~^?")
+ diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**",
+ "+=", "-=", "*=", "/=", "%=", "**=", "<<=", ">>=", "&=", "^=", "|=")
+ spaces = Rep1(Any(" \t\f"))
+ comment = Str("#") + Rep(AnyBut("\n"))
+ escaped_newline = Str("\\\n")
+ lineterm = Eol + Opt(Str("\n"))
+
+ return Lexicon([
+ (name, 'IDENT'),
+ (intconst, 'INT'),
+ (longconst, 'LONG'),
+ (fltconst, 'FLOAT'),
+ (imagconst, 'IMAG'),
+ (punct | diphthong, TEXT),
+
+ (bra, Method('open_bracket_action')),
+ (ket, Method('close_bracket_action')),
+ (lineterm, Method('newline_action')),
+
+ #(stringlit, 'STRING'),
+ (beginstring, Method('begin_string_action')),
+
+ (comment, IGNORE),
+ (spaces, IGNORE),
+ (escaped_newline, IGNORE),
+
+ State('INDENT', [
+ (Opt(spaces) + Opt(comment) + lineterm, IGNORE),
+ (indentation, Method('indentation_action')),
+ (Eof, Method('eof_action'))
+ ]),
+
+ State('SQ_STRING', [
+ (escapeseq, 'ESCAPE'),
+ (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
+ (Str('"'), 'CHARS'),
+ (Str("\n"), Method('unclosed_string_action')),
+ (Str("'"), Method('end_string_action')),
+ (Eof, 'EOF')
+ ]),
+
+ State('DQ_STRING', [
+ (escapeseq, 'ESCAPE'),
+ (Rep1(AnyBut('"\n\\')), 'CHARS'),
+ (Str("'"), 'CHARS'),
+ (Str("\n"), Method('unclosed_string_action')),
+ (Str('"'), Method('end_string_action')),
+ (Eof, 'EOF')
+ ]),
+
+ State('TSQ_STRING', [
+ (escapeseq, 'ESCAPE'),
+ (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
+ (Any("'\""), 'CHARS'),
+ (Str("\n"), 'NEWLINE'),
+ (Str("'''"), Method('end_string_action')),
+ (Eof, 'EOF')
+ ]),
+
+ State('TDQ_STRING', [
+ (escapeseq, 'ESCAPE'),
+ (Rep1(AnyBut('"\'\n\\')), 'CHARS'),
+ (Any("'\""), 'CHARS'),
+ (Str("\n"), 'NEWLINE'),
+ (Str('"""'), Method('end_string_action')),
+ (Eof, 'EOF')
+ ]),
+
+ (Eof, Method('eof_action'))
+ ],
+
+ # FIXME: Plex 1.9 needs different args here from Plex 1.1.4
+ #debug_flags = scanner_debug_flags,
+ #debug_file = scanner_dump_file
+ )
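+
+# For example, under the rules above a line such as "spam = 0xFF + 2.5e3"
+# scans roughly as IDENT("spam"), TEXT("="), INT("0xFF"), TEXT("+"),
+# FLOAT("2.5e3"); string openers, brackets and line ends are dispatched to
+# the scanner methods named above.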
+
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Main.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Main.py
new file mode 100644
index 00000000..2769b771
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Main.py
@@ -0,0 +1,564 @@
+#
+# Pyrex Top Level
+#
+
+import os, re, sys
+if sys.version_info[:2] < (2, 3):
+ print >>sys.stderr, "Sorry, Pyrex requires Python 2.3 or later"
+ sys.exit(1)
+
+import os
+from time import time
+import Builtin
+import Code
+import Errors
+import Parsing
+import Version
+from Errors import PyrexError, CompileError, error
+from Scanning import PyrexScanner
+from Symtab import BuiltinScope, DefinitionScope, ImplementationScope
+from Pyrex.Utils import set, replace_suffix, modification_time, \
+ file_newer_than, castrate_file, map_suffix
+from Filenames import cplus_suffix, pxd_suffixes, pyx_suffixes, \
+ package_init_files, pyx_to_c_suffix
+
+verbose = 0
+debug_timestamps = 0
+
+module_name_pattern = re.compile(
+ r"[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)*$")
+
+class Context:
+ # This class encapsulates the context needed for compiling
+ # one or more Pyrex implementation files along with their
+ # associated and imported declaration files. It holds
+ # the root of the module import namespace and the list
+ # of directories to search for include files.
+ #
+ # modules {string : DefinitionScope}
+ # include_directories [string]
+
+ def __init__(self, include_directories):
+ self.modules = {"__builtin__" : Builtin.builtin_scope}
+ self.include_directories = include_directories
+
+ def find_module(self, module_name,
+ relative_to = None, pos = None, need_pxd = 1):
+ # Finds and returns the module scope corresponding to
+ # the given relative or absolute module name. If this
+ # is the first time the module has been requested, finds
+        # the corresponding .pxd file and processes it.
+ # If relative_to is not None, it must be a module scope,
+ # and the module will first be searched for relative to
+ # that module, provided its name is not a dotted name.
+ debug_find_module = 0
+ if debug_find_module:
+ print "Context.find_module: module_name =", module_name, \
+ "relative_to =", relative_to, "pos =", pos, "need_pxd =", need_pxd
+ scope = None
+ pxd_pathname = None
+ if "." not in module_name and relative_to:
+ if debug_find_module:
+ print "...trying relative import"
+ scope = relative_to.lookup_submodule(module_name)
+ if not scope:
+ qualified_name = relative_to.qualify_name(module_name)
+ pxd_pathname = self.find_pxd_file(qualified_name, pos)
+ if pxd_pathname:
+ scope = relative_to.find_submodule(module_name)
+ if not scope:
+ if debug_find_module:
+ print "...trying absolute import"
+ scope = self
+ for name in module_name.split("."):
+ scope = scope.find_submodule(name)
+ if debug_find_module:
+ print "...scope =", scope
+ if not scope.pxd_file_loaded:
+ if debug_find_module:
+ print "...pxd not loaded"
+ scope.pxd_file_loaded = 1
+ if not pxd_pathname:
+ if debug_find_module:
+ print "...looking for pxd file"
+ pxd_pathname = self.find_pxd_file(module_name, pos)
+ if debug_find_module:
+ print "......found ", pxd_pathname
+ if not pxd_pathname and need_pxd:
+ error(pos, "Cannot find .pxd file for module '%s'" % module_name)
+ if pxd_pathname:
+ try:
+ if debug_find_module:
+ print "Context.find_module: Parsing", pxd_pathname
+ pxd_tree = self.parse(pxd_pathname, scope, pxd = 1)
+ pxd_tree.analyse_declarations(scope)
+ except CompileError:
+ pass
+ return scope
+
+ def find_pxd_file(self, qualified_name, pos):
+ # Search include path for the .pxd file corresponding to the
+ # given fully-qualified module name.
+ # Will find either a dotted filename or a file in a
+ # package directory. If a source file position is given,
+ # the directory containing the source file is searched first
+ # for a dotted filename, and its containing package root
+ # directory is searched first for a non-dotted filename.
+ return self.search_package_directories(qualified_name, pxd_suffixes, pos)
+
+ def find_pyx_file(self, qualified_name, pos):
+ # Search include path for the .pyx file corresponding to the
+ # given fully-qualified module name, as for find_pxd_file().
+ return self.search_package_directories(qualified_name, pyx_suffixes, pos)
+
+ def search_package_directories(self, qualified_name, suffixes, pos):
+ dotted_filenames = [qualified_name + suffix for suffix in suffixes]
+ if pos:
+ here = os.path.dirname(pos[0])
+ for dotted_filename in dotted_filenames:
+ path = os.path.join(here, dotted_filename)
+ if os.path.exists(path):
+ return path
+ dirs = self.include_directories
+ if pos:
+ here = self.find_root_package_dir(pos[0])
+ dirs = [here] + dirs
+ names = qualified_name.split(".")
+ package_names = names[:-1]
+ module_name = names[-1]
+ filenames = [module_name + suffix for suffix in suffixes]
+ for root in dirs:
+ for dotted_filename in dotted_filenames:
+ path = os.path.join(root, dotted_filename)
+ if os.path.exists(path):
+ return path
+ dir = self.descend_to_package_dir(root, package_names)
+ if dir:
+ for filename in filenames:
+ path = os.path.join(dir, filename)
+ if os.path.exists(path):
+ return path
+ for init_filename in package_init_files:
+ path = os.path.join(dir, module_name, init_filename)
+ if os.path.exists(path):
+ return path
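+
+    # Illustrative search order (hypothetical layout): for
+    # find_pxd_file("pkg.mod", pos) the candidates are tried roughly as
+    #     <dir of pos[0]>/pkg.mod.pxd       (dotted filename next to the source)
+    #     <root>/pkg.mod.pxd                (for each search root)
+    #     <root>/pkg/mod.pxd                (if <root>/pkg is a package directory)
+    #     <root>/pkg/mod/<__init__ file>    (if mod is itself a package)
+    # where the roots are the source file's top-level package directory
+    # followed by the configured include directories.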
+
+ def find_root_package_dir(self, file_path):
+ # Given the full pathname of a source file, find the directory
+ # containing the top-level package that it ultimately belongs to.
+ dir = os.path.dirname(file_path)
+ while 1:
+ if not self.is_package_dir(dir):
+ return dir
+ parent = os.path.dirname(dir)
+ if parent == dir:
+ return dir
+ dir = parent
+
+ def descend_to_package_dir(self, root_dir, package_names):
+ # Starting from the given root directory, look for a nested
+ # succession of package directories. Returns the full pathname
+ # of the innermost one, or None.
+ dir = root_dir
+ for name in package_names:
+ dir = os.path.join(dir, name)
+ if self.is_package_dir(dir):
+ return dir
+
+ def is_package_dir(self, dir_path):
+ # Return true if the given directory is a package directory.
+ for filename in package_init_files:
+ path = os.path.join(dir_path, filename)
+ if os.path.exists(path):
+ return 1
+
+ def find_include_file(self, filename, pos):
+ # Search list of include directories for filename.
+ # Reports an error and returns None if not found.
+ path = self.search_include_directories(filename, pos)
+ if not path:
+ error(pos, "'%s' not found" % filename)
+ return path
+
+ def search_include_directories(self, filename, pos):
+ # Search the list of include directories for the given
+ # file name. If a source file position is given, first
+ # searches the directory containing that file. Returns
+ # None if not found, but does not report an error.
+ dirs = self.include_directories
+ if pos:
+ here_dir = os.path.dirname(pos[0])
+ dirs = [here_dir] + dirs
+ for dir in dirs:
+ path = os.path.join(dir, filename)
+ if os.path.exists(path):
+ return path
+ return None
+
+ def lookup_submodule(self, name):
+ # Look up a top-level module. Returns None if not found.
+ return self.modules.get(name, None)
+
+ def find_submodule(self, name):
+ # Find a top-level module, creating a new one if needed.
+ scope = self.lookup_submodule(name)
+ if not scope:
+ scope = DefinitionScope(name,
+ parent_module = None, context = self)
+ self.modules[name] = scope
+ return scope
+
+ def parse(self, source_filename, scope, pxd):
+ # Parse the given source file and return a parse tree.
+ f = open(source_filename, "rU")
+ s = PyrexScanner(f, source_filename, scope = scope, context = self)
+ try:
+ tree = Parsing.p_module(s, pxd)
+ finally:
+ f.close()
+ if Errors.num_errors > 0:
+ raise CompileError
+ return tree
+
+ def extract_module_name(self, path):
+        # Find the fully-qualified module name from the full pathname
+ # of a source file.
+ dir, filename = os.path.split(path)
+ module_name, _ = os.path.splitext(filename)
+ if "." not in module_name:
+ if module_name == "__init__":
+ dir, module_name = os.path.split(dir)
+ names = [module_name]
+ while self.is_package_dir(dir):
+ parent, package_name = os.path.split(dir)
+ if parent == dir:
+ break
+ names.insert(0, package_name)
+ dir = parent
+ module_name = ".".join(names)
+ if not module_name_pattern.match(module_name):
+ raise CompileError((path, 0, 0),
+ "'%s' is not a valid module name" % module_name)
+ return module_name
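+
+    # Illustrative examples (hypothetical paths):
+    #     /src/spam.pyx          -> "spam"
+    #     /src/pkg/eggs.pyx      -> "pkg.eggs"  (when /src/pkg has an __init__ file)
+    #     /src/pkg/__init__.pyx  -> "pkg"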
+
+ def dep_file_out_of_date(self, source_path):
+ dep_path = replace_suffix(source_path, ".dep")
+ if not os.path.exists(dep_path):
+ return 1
+ dep_time = modification_time(dep_path)
+ return file_newer_than(source_path, dep_time)
+
+ def c_file_out_of_date(self, source_path):
+ if debug_timestamps:
+ print "Checking whether", source_path, "is out of date"
+ c_path = map_suffix(source_path, pyx_to_c_suffix, ".c")
+ if not os.path.exists(c_path):
+ if debug_timestamps:
+ print "...yes, c file doesn't exist"
+ return 1
+ c_time = modification_time(c_path)
+ if file_newer_than(source_path, c_time):
+ if debug_timestamps:
+ print "...yes, newer than c file"
+ return 1
+ pos = [source_path]
+ module_name = self.extract_module_name(source_path)
+ pxd_path = self.find_pxd_file(module_name, pos)
+ if pxd_path and file_newer_than(pxd_path, c_time):
+ if debug_timestamps:
+ print "...yes, pxd file newer than c file"
+ return 1
+ dep_path = replace_suffix(source_path, ".dep")
+ if not os.path.exists(dep_path):
+ if debug_timestamps:
+ print "...yes, dep file does not exist"
+ return 1
+ for kind, name in self.read_dependency_file(source_path):
+ if kind == "cimport":
+ dep_path = self.find_pxd_file(name, pos)
+ elif kind == "include":
+ dep_path = self.search_include_directories(name, pos)
+ else:
+ continue
+ if dep_path and file_newer_than(dep_path, c_time):
+ if debug_timestamps:
+ print "...yes,", dep_path, "newer than c file"
+ return 1
+ if debug_timestamps:
+ print "...no"
+
+ def find_cimported_module_names(self, source_path):
+ for kind, name in self.read_dependency_file(source_path):
+ if kind == "cimport":
+ yield name
+
+ def read_dependency_file(self, source_path):
+ dep_path = replace_suffix(source_path, ".dep")
+ if os.path.exists(dep_path):
+ f = open(dep_path, "rU")
+ for line in f.readlines():
+ chunks = line.strip().split(" ", 1)
+ if len(chunks) == 2:
+ yield chunks
+ f.close()
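+
+    # Illustrative .dep file contents (hypothetical names); ModuleNode's
+    # generate_dep_file() writes one dependency per line, which this method
+    # yields back as (kind, name) pairs:
+    #     cimport pkg.helpers
+    #     include defs.pxi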
+
+ def compile(self, source, options = None):
+ # Compile a Pyrex implementation file in this context
+ # and return a CompilationResult.
+ if not options:
+ options = default_options
+ result = CompilationResult()
+ cwd = os.getcwd()
+ source = os.path.join(cwd, source)
+ if options.use_listing_file:
+ result.listing_file = replace_suffix(source, ".lis")
+ Errors.open_listing_file(result.listing_file,
+ echo_to_stderr = options.errors_to_stderr)
+ else:
+ Errors.open_listing_file(None)
+ if options.output_file:
+ result.c_file = os.path.join(cwd, options.output_file)
+ else:
+ if options.cplus:
+ result.c_file = replace_suffix(source, cplus_suffix)
+ else:
+ result.c_file = map_suffix(source, pyx_to_c_suffix, ".c")
+ module_name = self.extract_module_name(source)
+ initial_pos = (source, 1, 0)
+ def_scope = self.find_module(module_name, pos = initial_pos, need_pxd = 0)
+ imp_scope = ImplementationScope(def_scope)
+ errors_occurred = False
+ try:
+ tree = self.parse(source, imp_scope, pxd = 0)
+ tree.process_implementation(imp_scope, options, result)
+ except CompileError:
+ errors_occurred = True
+ Errors.close_listing_file()
+ result.num_errors = Errors.num_errors
+ if result.num_errors > 0:
+ errors_occurred = True
+ if errors_occurred and result.c_file:
+ try:
+ st = os.stat(source)
+ castrate_file(result.c_file, st)
+ except EnvironmentError:
+ pass
+ result.c_file = None
+ if result.c_file and not options.c_only and c_compile:
+ result.object_file = c_compile(result.c_file,
+ verbose_flag = options.show_version,
+ cplus = options.cplus)
+ if not options.obj_only and c_link:
+ result.extension_file = c_link(result.object_file,
+ extra_objects = options.objects,
+ verbose_flag = options.show_version,
+ cplus = options.cplus)
+ return result
+
+#------------------------------------------------------------------------
+#
+# Main Python entry points
+#
+#------------------------------------------------------------------------
+
+class CompilationOptions:
+ """
+ Options to the Pyrex compiler:
+
+ show_version boolean Display version number
+ use_listing_file boolean Generate a .lis file
+ errors_to_stderr boolean Echo errors to stderr when using .lis
+ include_path [string] Directories to search for include files
+ output_file string Name of generated .c file
+ generate_pxi boolean Generate .pxi file for public declarations
+ recursive boolean Recursively find and compile dependencies
+ timestamps boolean Only compile changed source files. If None,
+ defaults to true when recursive is true.
+ verbose boolean Always print source names being compiled
+ quiet boolean Don't print source names in recursive mode
+
+    The following options are experimental and only used on Mac OS X:
+
+ c_only boolean Stop after generating C file (default)
+ obj_only boolean Stop after compiling to .o file
+ objects [string] Extra .o files to link with
+ cplus boolean Compile as c++ code
+ """
+
+ def __init__(self, defaults = None, c_compile = 0, c_link = 0, **kw):
+ self.include_path = []
+ self.objects = []
+ if defaults:
+ if isinstance(defaults, CompilationOptions):
+ defaults = defaults.__dict__
+ else:
+ defaults = default_options
+ self.__dict__.update(defaults)
+ self.__dict__.update(kw)
+ if c_compile:
+ self.c_only = 0
+ if c_link:
+ self.obj_only = 0
+
+
+class CompilationResult:
+ """
+ Results from the Pyrex compiler:
+
+ c_file string or None The generated C source file
+ h_file string or None The generated C header file
+ i_file string or None The generated .pxi file
+ api_file string or None The generated C API .h file
+ listing_file string or None File of error messages
+ object_file string or None Result of compiling the C file
+ extension_file string or None Result of linking the object file
+ num_errors integer Number of compilation errors
+ """
+
+ def __init__(self):
+ self.c_file = None
+ self.h_file = None
+ self.i_file = None
+ self.api_file = None
+ self.listing_file = None
+ self.object_file = None
+ self.extension_file = None
+
+
+class CompilationResultSet(dict):
+ """
+ Results from compiling multiple Pyrex source files. A mapping
+ from source file paths to CompilationResult instances. Also
+ has the following attributes:
+
+ num_errors integer Total number of compilation errors
+ """
+
+ num_errors = 0
+
+ def add(self, source, result):
+ self[source] = result
+ self.num_errors += result.num_errors
+
+
+def compile_single(source, options):
+ """
+ compile_single(source, options)
+
+ Compile the given Pyrex implementation file and return a CompilationResult.
+ Always compiles a single file; does not perform timestamp checking or
+ recursion.
+ """
+ context = Context(options.include_path)
+ return context.compile(source, options)
+
+def compile_multiple(sources, options):
+ """
+ compile_multiple(sources, options)
+
+ Compiles the given sequence of Pyrex implementation files and returns
+ a CompilationResultSet. Performs timestamp checking and/or recursion
+ if these are specified in the options.
+ """
+ sources = [os.path.abspath(source) for source in sources]
+ processed = set()
+ results = CompilationResultSet()
+ context = Context(options.include_path)
+ recursive = options.recursive
+ timestamps = options.timestamps
+ if timestamps is None:
+ timestamps = recursive
+ verbose = options.verbose or ((recursive or timestamps) and not options.quiet)
+ for source in sources:
+ if source not in processed:
+ if not timestamps or context.c_file_out_of_date(source):
+ if verbose:
+ print >>sys.stderr, "Compiling", source
+ result = context.compile(source, options)
+ results.add(source, result)
+ processed.add(source)
+ if recursive:
+ for module_name in context.find_cimported_module_names(source):
+ path = context.find_pyx_file(module_name, [source])
+ if path:
+ sources.append(path)
+ else:
+ print >>sys.stderr, \
+ "Cannot find .pyx file for cimported module '%s'" % module_name
+ return results
+
+def compile(source, options = None, c_compile = 0, c_link = 0, **kwds):
+ """
+ compile(source [, options], [, <option> = <value>]...)
+
+ Compile one or more Pyrex implementation files, with optional timestamp
+    checking and recursion on dependencies. The source argument may be a string
+ or a sequence of strings. If it is a string and no recursion or timestamp
+ checking is requested, a CompilationResult is returned, otherwise a
+ CompilationResultSet is returned.
+ """
+ options = CompilationOptions(defaults = options, c_compile = c_compile,
+ c_link = c_link, **kwds)
+ if isinstance(source, basestring) and not options.timestamps \
+ and not options.recursive:
+ return compile_single(source, options)
+ else:
+ return compile_multiple(source, options)
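+
+# Illustrative usage (hypothetical file names):
+#
+#     from Pyrex.Compiler.Main import compile
+#     result = compile("spam.pyx", include_path = ["include"])
+#     if result.num_errors == 0:
+#         print result.c_file    # path of the generated C file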
+
+#------------------------------------------------------------------------
+#
+# Main command-line entry point
+#
+#------------------------------------------------------------------------
+
+def main(command_line = 0):
+ args = sys.argv[1:]
+ any_failures = 0
+ if command_line:
+ from CmdLine import parse_command_line
+ options, sources = parse_command_line(args)
+ else:
+ options = CompilationOptions(default_options)
+ sources = args
+ if options.show_version:
+ print >>sys.stderr, "Pyrex version %s" % Version.version
+ try:
+ result = compile(sources, options)
+ if result.num_errors > 0:
+ any_failures = 1
+ except EnvironmentError, e:
+ print >>sys.stderr, e
+ any_failures = 1
+ if any_failures:
+ sys.exit(1)
+
+#------------------------------------------------------------------------
+#
+# Set the default options depending on the platform
+#
+#------------------------------------------------------------------------
+
+default_options = dict(
+ show_version = 0,
+ use_listing_file = 0,
+ errors_to_stderr = 1,
+ c_only = 1,
+ obj_only = 1,
+ cplus = 0,
+ output_file = None,
+ generate_pxi = 0,
+ recursive = 0,
+ timestamps = None,
+ verbose = 0,
+ quiet = 0)
+
+if sys.platform == "mac":
+ from Pyrex.Mac.MacSystem import c_compile, c_link, CCompilerError
+ default_options['use_listing_file'] = 1
+elif sys.platform == "darwin":
+ from Pyrex.Mac.DarwinSystem import c_compile, c_link, CCompilerError
+else:
+ c_compile = None
+ c_link = None
+
+
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ModuleNode.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ModuleNode.py
new file mode 100644
index 00000000..9c2b0a31
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/ModuleNode.py
@@ -0,0 +1,1678 @@
+#
+# Pyrex - Module parse tree node
+#
+
+import os, time
+from cStringIO import StringIO
+from PyrexTypes import CPtrType, py_object_type, typecast
+from Pyrex.Utils import set
+
+# Following is set by Testing.py to suppress filename/date comments
+# in generated files, so as not to produce spurious changes in test
+# reference files.
+
+testing_mode = False
+
+
+import Code
+import Naming
+import Nodes
+import Options
+import PyrexTypes
+import TypeSlots
+import Version
+
+from Errors import error
+from PyrexTypes import py_object_type
+from Pyrex.Utils import open_new_file, replace_suffix
+
+class ModuleNode(Nodes.Node, Nodes.BlockNode):
+ # doc string or None
+ # body StatListNode
+ #
+ # referenced_modules [ModuleScope]
+ # module_temp_cname string
+
+ def analyse_declarations(self, env):
+ env.doc = self.doc
+ self.body.analyse_declarations(env)
+
+ def process_implementation(self, env, options, result):
+ self.analyse_declarations(env)
+ env.check_c_classes()
+ self.body.analyse_expressions(env)
+ env.return_type = PyrexTypes.c_void_type
+ self.referenced_modules = self.find_referenced_modules(env)
+ if self.has_imported_c_functions():
+ self.module_temp_cname = env.allocate_temp_pyobject()
+ env.release_temp(self.module_temp_cname)
+ if options.timestamps or options.recursive:
+ self.generate_dep_file(env, result)
+ self.generate_c_code(env, result)
+ self.generate_h_code(env, options, result)
+ self.generate_api_code(env, result)
+
+ def has_imported_c_functions(self):
+ for module in self.referenced_modules:
+ for entry in module.cfunc_entries:
+ if entry.defined_in_pxd:
+ return 1
+ return 0
+
+ def generate_dep_file(self, env, result):
+ modules = self.referenced_modules
+ includes = set(env.pyrex_include_files)
+ for module in modules:
+ for include in module.pyrex_include_files:
+ includes.add(include)
+ if len(modules) > 1 or includes:
+ include_list = list(includes)
+ include_list.sort()
+ dep_file = replace_suffix(result.c_file, ".dep")
+ f = open(dep_file, "w")
+ try:
+ for module in modules[:-1]:
+ f.write("cimport %s\n" % module.qualified_name)
+ for path in include_list:
+ f.write("include %s\n" % path)
+ finally:
+ f.close()
+
+ def generate_h_code(self, env, options, result):
+ def pub(entries): #, pxd = 0):
+ return [entry for entry in entries
+ if entry.visibility == 'public'] # or pxd and entry.defined_in_pxd]
+ denv = env.definition_scope
+ h_types = pub(denv.type_entries) + pub(env.type_entries)
+ h_vars = pub(denv.var_entries) + pub(env.var_entries)
+ h_funcs = pub(denv.cfunc_entries) + pub(env.cfunc_entries)
+ h_extension_types = pub(denv.c_class_entries) + pub(env.c_class_entries)
+ if h_types or h_vars or h_funcs or h_extension_types:
+ result.h_file = replace_suffix(result.c_file, ".h")
+ h_code = Code.CCodeWriter(open_new_file(result.h_file))
+ if options.generate_pxi:
+ result.i_file = replace_suffix(result.c_file, ".pxi")
+ i_code = Code.PyrexCodeWriter(result.i_file)
+ else:
+ i_code = None
+ guard = Naming.h_guard_prefix + env.qualified_name.replace(".", "__")
+ h_code.put_h_guard(guard)
+ self.generate_extern_c_macro_definition(h_code)
+ self.generate_type_header_code(h_types, h_code)
+ h_code.putln("")
+ h_code.putln("#ifndef %s" % Naming.api_guard_prefix + self.api_name(env))
+ if h_vars:
+ h_code.putln("")
+ for entry in h_vars:
+ self.generate_public_declaration(entry, h_code, i_code)
+ if h_funcs:
+ h_code.putln("")
+ for entry in h_funcs:
+ self.generate_public_declaration(entry, h_code, i_code)
+ if h_extension_types:
+ h_code.putln("")
+ for entry in h_extension_types:
+ self.generate_cclass_header_code(entry.type, h_code)
+ if i_code:
+ self.generate_cclass_include_code(entry.type, i_code)
+ h_code.putln("")
+ h_code.putln("#endif")
+ h_code.putln("")
+ h_code.putln("PyMODINIT_FUNC init%s(void);" % env.module_name)
+ h_code.putln("")
+ h_code.putln("#endif")
+
+ def generate_public_declaration(self, entry, h_code, i_code):
+ h_code.putln("%s %s;" % (
+ Naming.extern_c_macro,
+ entry.type.declaration_code(
+ entry.cname, dll_linkage = "DL_IMPORT")))
+ if i_code:
+ i_code.putln("cdef extern %s" %
+ entry.type.declaration_code(entry.cname, pyrex = 1))
+
+ def api_name(self, env):
+ return env.qualified_name.replace(".", "__")
+
+ def generate_api_code(self, env, result):
+ denv = env.definition_scope
+ api_funcs = []
+ public_extension_types = []
+ has_api_extension_types = 0
+ for entry in denv.cfunc_entries:
+ if entry.api:
+ api_funcs.append(entry)
+ for entry in env.cfunc_entries:
+ if entry.api:
+ api_funcs.append(entry)
+ for entry in denv.c_class_entries + env.c_class_entries:
+ if entry.visibility == 'public':
+ public_extension_types.append(entry)
+ if entry.api:
+ has_api_extension_types = 1
+ if api_funcs or has_api_extension_types:
+ result.api_file = replace_suffix(result.c_file, "_api.h")
+ h_code = Code.CCodeWriter(open_new_file(result.api_file))
+ name = self.api_name(env)
+ guard = Naming.api_guard_prefix + name
+ h_code.put_h_guard(guard)
+ h_code.putln('#include "Python.h"')
+ if result.h_file:
+ h_code.putln('#include "%s"' % os.path.basename(result.h_file))
+ for entry in public_extension_types:
+ type = entry.type
+ h_code.putln("")
+ h_code.putln("static PyTypeObject *%s;" % type.typeptr_cname)
+ h_code.putln("#define %s (*%s)" % (
+ type.typeobj_cname, type.typeptr_cname))
+ if api_funcs:
+ h_code.putln("")
+ for entry in api_funcs:
+ type = CPtrType(entry.type)
+ h_code.putln("static %s;" % type.declaration_code(entry.cname))
+ h_code.putln("")
+ h_code.put_h_guard(Naming.api_func_guard + "import_module")
+ h_code.put(import_module_utility_code[1])
+ h_code.putln("")
+ h_code.putln("#endif")
+ if api_funcs:
+ h_code.putln("")
+ h_code.put(function_import_utility_code[1])
+ if public_extension_types:
+ h_code.putln("")
+ h_code.put(type_import_utility_code[1])
+ h_code.putln("")
+ h_code.putln("static int import_%s(void) {" % name)
+ h_code.putln("PyObject *module = 0;")
+ h_code.putln('module = __Pyx_ImportModule("%s");' % env.qualified_name)
+ h_code.putln("if (!module) goto bad;")
+ for entry in api_funcs:
+ sig = entry.type.signature_string()
+ h_code.putln(
+ 'if (__Pyx_ImportFunction(module, "%s", (void**)&%s, "%s") < 0) goto bad;' % (
+ entry.name,
+ entry.cname,
+ sig))
+ h_code.putln("Py_DECREF(module); module = 0;")
+ for entry in public_extension_types:
+ self.generate_type_import_call(entry.type, h_code, "goto bad;")
+ h_code.putln("return 0;")
+ h_code.putln("bad:")
+ h_code.putln("Py_XDECREF(module);")
+ h_code.putln("return -1;")
+ h_code.putln("}")
+ h_code.putln("")
+ h_code.putln("#endif")
+
+ def generate_cclass_header_code(self, type, h_code):
+ h_code.putln("%s DL_IMPORT(PyTypeObject) %s;" % (
+ Naming.extern_c_macro,
+ type.typeobj_cname))
+ #self.generate_obj_struct_definition(type, h_code)
+
+ def generate_cclass_include_code(self, type, i_code):
+ i_code.putln("cdef extern class %s.%s:" % (
+ type.module_name, type.name))
+ i_code.indent()
+ var_entries = type.scope.var_entries
+ if var_entries:
+ for entry in var_entries:
+ i_code.putln("cdef %s" %
+ entry.type.declaration_code(entry.cname, pyrex = 1))
+ else:
+ i_code.putln("pass")
+ i_code.dedent()
+
+ def generate_c_code(self, env, result):
+ code = Code.MainCCodeWriter(StringIO())
+ code.h = Code.CCodeWriter(StringIO())
+ code.init_labels()
+
+ modules = self.referenced_modules
+ self.generate_module_preamble(env, modules, code.h)
+
+ code.putln("")
+ code.putln("/* Implementation of %s */" % env.qualified_name)
+ #self.generate_const_definitions(env, code)
+ #self.generate_interned_name_decls(env, code)
+ #self.generate_py_string_decls(env, code)
+ self.body.generate_function_definitions(env, code)
+ #self.generate_interned_name_table(env, code)
+ #self.generate_py_string_table(env, code)
+ self.generate_typeobj_definitions(env, code)
+ self.generate_method_table(env, code)
+ self.generate_filename_init_prototype(code)
+ self.generate_module_init_func(modules[:-1], env, code)
+ self.generate_filename_table(code)
+ self.generate_utility_functions(code)
+
+ denv = env.definition_scope
+ for module in modules:
+ code.h.putln("")
+ code.h.putln("/* Declarations from %s */" % module.qualified_name)
+ self.generate_declarations_for_module(module, code.h,
+ implementation = module is denv)
+
+ code.h.putln("")
+ code.h.putln("/* Declarations from implementation of %s */" %
+ env.qualified_name)
+ self.generate_declarations_for_module(env, code.h, implementation = 1)
+ code.global_state.generate_const_declarations(code.h)
+ #self.generate_interned_name_table(code.interned_strings, code.h)
+ #self.generate_py_string_table(code.py_strings, code.h)
+ self.generate_default_value_declarations(env, code.h)
+
+ f = open_new_file(result.c_file)
+ f.write(code.h.f.getvalue())
+ f.write("\n")
+ f.write(code.f.getvalue())
+ f.close()
+ result.c_file_generated = 1
+
+ def find_referenced_modules(self, env):
+ # Given the ImplementationScope, find the DefinitionScopes of all
+ # modules cimported, directly or indirectly. Includes this module's
+ # DefinitionScope as the last entry in the list.
+ denv = env.definition_scope
+ module_list = []
+ modules_seen = set()
+ def add_module(module):
+ if module not in modules_seen:
+ modules_seen.add(module)
+ add_modules(module.cimported_modules)
+ module_list.append(module)
+ def add_modules(modules):
+ for module in modules:
+ add_module(module)
+ modules_seen.add(denv)
+ add_modules(denv.cimported_modules)
+ add_modules(env.cimported_modules)
+ module_list.append(denv)
+ #self.print_referenced_modules(module_list) ###
+ return module_list
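+    # For illustration (hypothetical scopes): if this module's .pxd cimports b,
+    # and b in turn cimports a, the traversal above returns
+    #
+    #     [<scope of a>, <scope of b>, <this module's DefinitionScope>]
+    #
+    # i.e. every scope follows the scopes it cimports, and this module's
+    # DefinitionScope comes last, as described in the comment above.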
+
+ def print_referenced_modules(self, module_list):
+ print "find_referenced_modules: result =",
+ for m in module_list:
+ print m,
+ print
+
+ def generate_module_preamble(self, env, cimported_modules, code):
+ comment = "Generated by Pyrex"
+ if not testing_mode:
+ comment = "%s %s on %s" % (comment, Version.version, time.asctime())
+ code.putln('/* %s */' % comment)
+ code.putln('')
+ code.putln('#define PY_SSIZE_T_CLEAN')
+ for filename in env.python_include_files:
+ code.putln('#include "%s"' % filename)
+ code.putln("#ifndef PY_LONG_LONG")
+ code.putln(" #define PY_LONG_LONG LONG_LONG")
+ code.putln("#endif")
+ code.putln("#if PY_VERSION_HEX < 0x02050000")
+ code.putln(" typedef int Py_ssize_t;")
+ code.putln(" #define PY_SSIZE_T_MAX INT_MAX")
+ code.putln(" #define PY_SSIZE_T_MIN INT_MIN")
+ code.putln(" #define PyInt_FromSsize_t(z) PyInt_FromLong(z)")
+ code.putln(" #define PyInt_AsSsize_t(o) PyInt_AsLong(o)")
+ code.putln("#endif")
+ code.putln("#if !defined(WIN32) && !defined(MS_WINDOWS)")
+ code.putln(" #ifndef __stdcall")
+ code.putln(" #define __stdcall")
+ code.putln(" #endif")
+ code.putln(" #ifndef __cdecl")
+ code.putln(" #define __cdecl")
+ code.putln(" #endif")
+ code.putln("#endif")
+ self.generate_extern_c_macro_definition(code)
+ code.putln("#include <math.h>")
+ self.generate_includes(env, cimported_modules, code)
+ code.putln('')
+ code.put(Nodes.utility_function_predeclarations)
+ code.putln('')
+ code.putln('static PyObject *%s;' % env.module_cname)
+ code.putln('static PyObject *%s;' % Naming.builtins_cname)
+ code.putln('static int %s;' % Naming.lineno_cname)
+ code.putln('static char *%s;' % Naming.filename_cname)
+ code.putln('static char **%s;' % Naming.filetable_cname)
+ doc = None
+ doc1 = env.definition_scope.doc
+ doc2 = env.doc
+ if doc1 and doc2:
+ doc = "%s\\n%s" % (doc1, doc2)
+ else:
+ doc = doc1 or doc2
+ if doc:
+ code.putln('')
+ code.putln('static char %s[] = "%s";' % (env.doc_cname, doc))
+
+ def generate_extern_c_macro_definition(self, code):
+ name = Naming.extern_c_macro
+ code.putln("#ifdef __cplusplus")
+ code.putln('#define %s extern "C"' % name)
+ code.putln("#else")
+ code.putln("#define %s extern" % name)
+ code.putln("#endif")
+
+ def generate_includes(self, env, cimported_modules, code):
+ includes = []
+ for module in cimported_modules + [env]:
+ for filename in module.include_files:
+ if filename not in includes:
+ includes.append(filename)
+ for filename in includes:
+ code.putln('#include "%s"' % filename)
+
+ def generate_filename_table(self, code):
+ code.global_state.generate_filename_table(code)
+
+ def generate_declarations_for_module(self, env, code, implementation):
+ self.generate_type_predeclarations(env, code)
+ self.generate_type_definitions(env, code) #, implementation)
+ self.generate_global_declarations(env, code, implementation)
+ self.generate_cfunction_predeclarations(env, code, implementation)
+
+ def generate_type_predeclarations(self, env, code):
+ pass
+
+ def generate_type_header_code(self, type_entries, code):
+ # Generate definitions of structs/unions/enums/typedefs/objstructs.
+ #self.generate_gcc33_hack(env, code) # Is this still needed?
+ #for entry in env.type_entries:
+ for entry in type_entries:
+ if not entry.in_cinclude:
+ #print "generate_type_header_code:", entry.name, repr(entry.type) ###
+ type = entry.type
+ if type.is_typedef: # Must test this first!
+ self.generate_typedef(entry, code)
+ elif type.is_struct_or_union:
+ self.generate_struct_union_definition(entry, code)
+ elif type.is_enum:
+ self.generate_enum_definition(entry, code)
+ elif type.is_extension_type:
+ self.generate_obj_struct_definition(type, code)
+
+ def generate_type_definitions(self, env, code): #, implementation):
+ #print "generate_type_definitions:", env ###
+ type_entries = env.type_entries
+ self.generate_type_header_code(type_entries, code)
+ for entry in env.c_class_entries:
+ if not entry.in_cinclude:
+ self.generate_typeobject_predeclaration(entry, code)
+ self.generate_exttype_vtable_struct(entry, code)
+ self.generate_exttype_vtabptr_declaration(entry, code)
+
+ def generate_typedef(self, entry, code):
+ base_type = entry.type.typedef_base_type
+ code.putln("")
+ code.putln("typedef %s;" % base_type.declaration_code(entry.cname))
+
+ def sue_header_footer(self, type, kind, name):
+ if type.typedef_flag:
+ header = "typedef %s {" % kind
+ footer = "} %s;" % name
+ else:
+ header = "%s %s {" % (kind, name)
+ footer = "};"
+ return header, footer
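+    # For illustration (hypothetical name "Spam"): with typedef_flag set this
+    # produces the pair
+    #
+    #     typedef struct { ... } Spam;
+    #
+    # and without it
+    #
+    #     struct Spam { ... };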
+
+ def generate_struct_union_definition(self, entry, code):
+ type = entry.type
+ scope = type.scope
+ if scope:
+ header, footer = \
+ self.sue_header_footer(type, type.kind, type.cname)
+ code.putln("")
+ code.putln(header)
+ var_entries = scope.var_entries
+ if not var_entries and not scope.cfunc_entries:
+ error(entry.pos,
+ "Empty struct or union definition not allowed outside a"
+ " 'cdef extern from' block")
+ for attr in var_entries:
+ code.putln(
+ "%s;" %
+ attr.type.declaration_code(attr.cname))
+ code.putln(footer)
+
+ def generate_enum_definition(self, entry, code):
+ type = entry.type
+ name = entry.cname or entry.name or ""
+ header, footer = \
+ self.sue_header_footer(type, "enum", name)
+ code.putln("")
+ code.putln(header)
+ enum_values = entry.enum_values
+ if not enum_values:
+ error(entry.pos,
+ "Empty enum definition not allowed outside a"
+ " 'cdef extern from' block")
+ else:
+ last_entry = enum_values[-1]
+ for value_entry in enum_values:
+ if value_entry.value == value_entry.name:
+ value_code = value_entry.cname
+ else:
+ value_code = ("%s = %s" % (
+ value_entry.cname,
+ value_entry.value))
+ if value_entry is not last_entry:
+ value_code += ","
+ code.putln(value_code)
+ code.putln(footer)
+
+ def generate_typeobject_predeclaration(self, entry, code):
+ code.putln("")
+ name = entry.type.typeobj_cname
+ if name:
+ if entry.visibility == 'extern' and not entry.in_cinclude:
+ code.putln("%s DL_IMPORT(PyTypeObject) %s;" % (
+ Naming.extern_c_macro,
+ name))
+ elif entry.visibility == 'public':
+ #code.putln("DL_EXPORT(PyTypeObject) %s;" % name)
+ code.putln("%s DL_EXPORT(PyTypeObject) %s;" % (
+ Naming.extern_c_macro,
+ name))
+
+ def generate_exttype_vtable_struct(self, entry, code):
+ # Generate struct declaration for an extension type's vtable.
+ type = entry.type
+ scope = type.scope
+ if type.vtabstruct_cname:
+ code.putln("")
+ code.putln(
+ "struct %s {" %
+ type.vtabstruct_cname)
+ if type.base_type and type.base_type.vtabstruct_cname:
+ code.putln("struct %s %s;" % (
+ type.base_type.vtabstruct_cname,
+ Naming.obj_base_cname))
+ for method_entry in scope.cfunc_entries:
+ if not method_entry.is_inherited:
+ code.putln(
+ "%s;" % method_entry.type.declaration_code("(*%s)" % method_entry.name))
+ code.putln(
+ "};")
+
+ def generate_exttype_vtabptr_declaration(self, entry, code):
+ # Generate declaration of pointer to an extension type's vtable.
+ type = entry.type
+ if type.vtabptr_cname:
+ code.putln("static struct %s *%s;" % (
+ type.vtabstruct_cname,
+ type.vtabptr_cname))
+
+ def generate_obj_struct_definition(self, type, code):
+ # Generate object struct definition for an
+ # extension type.
+ if not type.scope:
+ return # Forward declared but never defined
+ header, footer = \
+ self.sue_header_footer(type, "struct", type.objstruct_cname)
+ code.putln("")
+ code.putln(header)
+ base_type = type.base_type
+ if base_type:
+ code.putln(
+ "%s%s %s;" % (
+ ("struct ", "")[base_type.typedef_flag],
+ base_type.objstruct_cname,
+ Naming.obj_base_cname))
+ else:
+ code.putln(
+ "PyObject_HEAD")
+ if type.vtabslot_cname and not (type.base_type and type.base_type.vtabslot_cname):
+ code.putln(
+ "struct %s *%s;" % (
+ type.vtabstruct_cname,
+ type.vtabslot_cname))
+ for attr in type.scope.var_entries:
+ code.putln(
+ "%s;" %
+ attr.type.declaration_code(attr.cname))
+ code.putln(footer)
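+    # Illustrative output (names are hypothetical; the real cnames are mangled):
+    # a cdef class with no base type and one "cdef object" attribute yields
+    # roughly
+    #
+    #     struct <objstruct_cname> {
+    #         PyObject_HEAD
+    #         PyObject *attr;
+    #     };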
+
+ def generate_global_declarations(self, env, code, implementation):
+ code.putln("")
+ for entry in env.c_class_entries:
+ if implementation or entry.defined_in_pxd:
+ code.putln("static PyTypeObject *%s = 0;" %
+ entry.type.typeptr_cname)
+ #code.putln("/* var_entries */") ###
+ code.put_var_declarations(env.var_entries, static = 1,
+ dll_linkage = "DL_EXPORT", definition = implementation)
+
+ def generate_default_value_declarations(self, env, code):
+ #code.putln("/* default_entries */") ###
+ code.putln("")
+ code.put_var_declarations(env.default_entries, static = 1)
+
+ def generate_cfunction_predeclarations(self, env, code, implementation):
+ for entry in env.cfunc_entries:
+ if not entry.in_cinclude:
+ # and (definition or entry.defined_in_pxd or
+ # entry.visibility == 'extern'):
+ if entry.visibility in ('public', 'extern'):
+ dll_linkage = "DL_EXPORT"
+ else:
+ dll_linkage = None
+ type = entry.type
+ if not implementation: #and entry.defined_in_pxd:
+ type = CPtrType(type)
+ header = type.declaration_code(entry.cname,
+ dll_linkage = dll_linkage)
+ if entry.visibility <> 'private':
+ storage_class = "%s " % Naming.extern_c_macro
+ else:
+ storage_class = "static "
+ code.putln("%s%s; /*proto*/" % (
+ storage_class,
+ header))
+
+ def generate_typeobj_definitions(self, env, code):
+ full_module_name = env.qualified_name
+ denv = env.definition_scope
+ for entry in denv.c_class_entries + env.c_class_entries:
+ #print "generate_typeobj_definitions:", entry.name
+ #print "...visibility =", entry.visibility
+ if entry.visibility <> 'extern':
+ type = entry.type
+ scope = type.scope
+ if scope: # could be None if there was an error
+ self.generate_exttype_vtable(scope, code)
+ self.generate_new_function(scope, code)
+ self.generate_dealloc_function(scope, code)
+ self.generate_traverse_function(scope, code)
+ self.generate_clear_function(scope, code)
+ if scope.defines_any(["__getitem__"]):
+ self.generate_getitem_int_function(scope, code)
+ if scope.defines_any(["__setitem__", "__delitem__"]):
+ self.generate_ass_subscript_function(scope, code)
+ if scope.defines_any(["__setslice__", "__delslice__"]):
+ self.generate_ass_slice_function(scope, code)
+ if scope.defines_any(["__getattr__"]):
+ self.generate_getattro_function(scope, code)
+ if scope.defines_any(["__setattr__", "__delattr__"]):
+ self.generate_setattro_function(scope, code)
+ if scope.defines_any(["__get__"]):
+ self.generate_descr_get_function(scope, code)
+ if scope.defines_any(["__set__", "__delete__"]):
+ self.generate_descr_set_function(scope, code)
+ self.generate_property_accessors(scope, code)
+ self.generate_method_table(scope, code)
+ self.generate_member_table(scope, code)
+ self.generate_getset_table(scope, code)
+ self.generate_typeobj_definition(full_module_name, entry, code)
+
+ def generate_exttype_vtable(self, scope, code):
+ # Generate the definition of an extension type's vtable.
+ type = scope.parent_type
+ if type.vtable_cname:
+ code.putln("static struct %s %s;" % (
+ type.vtabstruct_cname,
+ type.vtable_cname))
+
+ def generate_self_cast(self, scope, code):
+ type = scope.parent_type
+ code.putln(
+ "%s = (%s)o;" % (
+ type.declaration_code("p"),
+ type.declaration_code("")))
+
+ def generate_new_function(self, scope, code):
+ type = scope.parent_type
+ base_type = type.base_type
+ py_attrs = []
+ for entry in scope.var_entries:
+ if entry.type.is_pyobject:
+ py_attrs.append(entry)
+ need_self_cast = type.vtabslot_cname or py_attrs
+ code.putln("")
+ code.putln(
+ "static PyObject *%s(PyTypeObject *t, PyObject *a, PyObject *k) {"
+ % scope.mangle_internal("tp_new"))
+ if need_self_cast:
+ code.putln(
+ "%s;"
+ % scope.parent_type.declaration_code("p"))
+ if base_type:
+ code.putln(
+ "PyObject *o = %s->tp_new(t, a, k);" %
+ base_type.typeptr_cname)
+ else:
+ code.putln(
+ "PyObject *o = (*t->tp_alloc)(t, 0);")
+ code.putln(
+ "if (!o) return 0;")
+ if need_self_cast:
+ code.putln(
+ "p = %s;"
+ % type.cast_code("o"))
+ #if need_self_cast:
+ # self.generate_self_cast(scope, code)
+ if type.vtabslot_cname:
+ code.putln("*(struct %s **)&p->%s = %s;" % (
+ type.vtabstruct_cname,
+ type.vtabslot_cname,
+ type.vtabptr_cname))
+ for entry in py_attrs:
+ if entry.name == "__weakref__":
+ code.putln("p->%s = 0;" % entry.cname)
+ else:
+ code.put_init_var_to_py_none(entry, "p->%s")
+ entry = scope.lookup_here("__new__")
+ if entry:
+ code.putln(
+ "if (%s(o, a, k) < 0) {" %
+ entry.func_cname)
+ code.put_decref_clear("o", py_object_type)
+ code.putln(
+ "}")
+ code.putln(
+ "return o;")
+ code.putln(
+ "}")
+
+ def generate_dealloc_function(self, scope, code):
+ base_type = scope.parent_type.base_type
+ code.putln("")
+ code.putln(
+ "static void %s(PyObject *o) {"
+ % scope.mangle_internal("tp_dealloc"))
+ #py_attrs = []
+ #for entry in scope.var_entries:
+ # if entry.type.is_pyobject and entry.name <> "__weakref__":
+ # py_attrs.append(entry)
+ py_attrs = scope.pyattr_entries
+ if py_attrs:
+ self.generate_self_cast(scope, code)
+ self.generate_usr_dealloc_call(scope, code)
+ if scope.lookup_here("__weakref__"):
+ code.putln("PyObject_ClearWeakRefs(o);")
+ for entry in py_attrs:
+ code.put_xdecref("p->%s" % entry.cname, entry.type)
+ if base_type:
+ code.putln(
+ "%s->tp_dealloc(o);" %
+ base_type.typeptr_cname)
+ else:
+ code.putln(
+ "(*o->ob_type->tp_free)(o);")
+ code.putln(
+ "}")
+
+ def generate_usr_dealloc_call(self, scope, code):
+ entry = scope.lookup_here("__dealloc__")
+ if entry:
+ code.putln(
+ "{")
+ code.putln(
+ "PyObject *etype, *eval, *etb;")
+ code.putln(
+ "PyErr_Fetch(&etype, &eval, &etb);")
+ code.putln(
+ "++o->ob_refcnt;")
+ code.putln(
+ "%s(o);" %
+ entry.func_cname)
+ code.putln(
+ "if (PyErr_Occurred()) PyErr_WriteUnraisable(o);")
+ code.putln(
+ "--o->ob_refcnt;")
+ code.putln(
+ "PyErr_Restore(etype, eval, etb);")
+ code.putln(
+ "}")
+
+ def generate_traverse_function(self, scope, code):
+ py_attrs = scope.pyattr_entries
+ if py_attrs:
+ base_type = scope.parent_type.base_type
+ code.putln("")
+ code.putln(
+ "static int %s(PyObject *o, visitproc v, void *a) {"
+ % scope.mangle_internal("tp_traverse"))
+ code.putln(
+ "int e;")
+ self.generate_self_cast(scope, code)
+ if base_type:
+ code.putln(
+ "traverseproc t;")
+ code.putln(
+ "if ((t = %s->tp_traverse)) {" %
+ base_type.typeptr_cname)
+ code.putln(
+ "e = t(o, v, a); if (e) return e;")
+ code.putln(
+ "}")
+ for entry in py_attrs:
+ var_code = "p->%s" % entry.cname
+ code.putln(
+ "if (%s) {"
+ % var_code)
+ if entry.type.is_extension_type:
+ var_code = "((PyObject*)%s)" % var_code
+ code.putln(
+ "e = (*v)(%s, a); if (e) return e;"
+ % var_code)
+ code.putln(
+ "}")
+ code.putln(
+ "return 0;")
+ code.putln(
+ "}")
+
+ def generate_clear_function(self, scope, code):
+ py_attrs = scope.pyattr_entries
+ if py_attrs:
+ base_type = scope.parent_type.base_type
+ code.putln("")
+ code.putln(
+ "static int %s(PyObject *o) {"
+ % scope.mangle_internal("tp_clear"))
+ self.generate_self_cast(scope, code)
+ code.putln(
+ "PyObject *t;")
+ if base_type:
+ code.putln(
+ "inquiry c;")
+ code.putln(
+ "if ((c = %s->tp_clear)) {" %
+ base_type.typeptr_cname)
+ code.putln(
+ "c(o);")
+ code.putln(
+ "}")
+ for entry in py_attrs:
+ name = "p->%s" % entry.cname
+ code.putln(
+ "t = %s; " %
+ typecast(py_object_type, entry.type, name))
+ code.put_init_var_to_py_none(entry, "p->%s")
+ #code.put_xdecref(name, entry.type)
+ code.putln(
+ "Py_XDECREF(t);")
+ code.putln(
+ "return 0;")
+ code.putln(
+ "}")
+
+ def generate_getitem_int_function(self, scope, code):
+ # This function is put into the sq_item slot when
+ # a __getitem__ method is present. It converts its
+ # argument to a Python integer and calls mp_subscript.
+ code.putln(
+ "static PyObject *%s(PyObject *o, Py_ssize_t i) {" %
+ scope.mangle_internal("sq_item"))
+ code.putln(
+ "PyObject *r;")
+ code.putln(
+ "PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0;")
+ code.putln(
+ "r = o->ob_type->tp_as_mapping->mp_subscript(o, x);")
+ code.putln(
+ "Py_DECREF(x);")
+ code.putln(
+ "return r;")
+ code.putln(
+ "}")
+
+ def generate_ass_subscript_function(self, scope, code):
+ # Setting and deleting an item are both done through
+ # the ass_subscript method, so we dispatch to user's __setitem__
+ # or __delitem__, or raise an exception.
+ base_type = scope.parent_type.base_type
+ set_entry = scope.lookup_here("__setitem__")
+ del_entry = scope.lookup_here("__delitem__")
+ code.putln("")
+ code.putln(
+ "static int %s(PyObject *o, PyObject *i, PyObject *v) {" %
+ scope.mangle_internal("mp_ass_subscript"))
+ code.putln(
+ "if (v) {")
+ if set_entry:
+ code.putln(
+ "return %s(o, i, v);" %
+ set_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
+ code.putln(
+ "PyErr_Format(PyExc_NotImplementedError,")
+ code.putln(
+ ' "Subscript assignment not supported by %s", o->ob_type->tp_name);')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "else {")
+ if del_entry:
+ code.putln(
+ "return %s(o, i);" %
+ del_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
+ code.putln(
+ "PyErr_Format(PyExc_NotImplementedError,")
+ code.putln(
+ ' "Subscript deletion not supported by %s", o->ob_type->tp_name);')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "}")
+
+ def generate_guarded_basetype_call(
+ self, base_type, substructure, slot, args, code):
+ if base_type:
+ base_tpname = base_type.typeptr_cname
+ if substructure:
+ code.putln(
+ "if (%s->%s && %s->%s->%s)" % (
+ base_tpname, substructure, base_tpname, substructure, slot))
+ code.putln(
+ " return %s->%s->%s(%s);" % (
+ base_tpname, substructure, slot, args))
+ else:
+ code.putln(
+ "if (%s->%s)" % (
+ base_tpname, slot))
+ code.putln(
+ " return %s->%s(%s);" % (
+ base_tpname, slot, args))
+
+ def generate_ass_slice_function(self, scope, code):
+ # Setting and deleting a slice are both done through
+ # the ass_slice method, so we dispatch to user's __setslice__
+ # or __delslice__, or raise an exception.
+ base_type = scope.parent_type.base_type
+ set_entry = scope.lookup_here("__setslice__")
+ del_entry = scope.lookup_here("__delslice__")
+ code.putln("")
+ code.putln(
+ "static int %s(PyObject *o, Py_ssize_t i, Py_ssize_t j, PyObject *v) {" %
+ scope.mangle_internal("sq_ass_slice"))
+ code.putln(
+ "if (v) {")
+ if set_entry:
+ code.putln(
+ "return %s(o, i, j, v);" %
+ set_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
+ code.putln(
+ "PyErr_Format(PyExc_NotImplementedError,")
+ code.putln(
+ ' "2-element slice assignment not supported by %s", o->ob_type->tp_name);')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "else {")
+ if del_entry:
+ code.putln(
+ "return %s(o, i, j);" %
+ del_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
+ code.putln(
+ "PyErr_Format(PyExc_NotImplementedError,")
+ code.putln(
+ ' "2-element slice deletion not supported by %s", o->ob_type->tp_name);')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "}")
+
+ def generate_getattro_function(self, scope, code):
+ # First try to get the attribute using PyObject_GenericGetAttr.
+ # If that raises an AttributeError, call the user's __getattr__
+ # method.
+ entry = scope.lookup_here("__getattr__")
+ code.putln("")
+ code.putln(
+ "static PyObject *%s(PyObject *o, PyObject *n) {"
+ % scope.mangle_internal("tp_getattro"))
+ code.putln(
+ "PyObject *v = PyObject_GenericGetAttr(o, n);")
+ code.putln(
+ "if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) {")
+ code.putln(
+ "PyErr_Clear();")
+ code.putln(
+ "v = %s(o, n);" %
+ entry.func_cname)
+ code.putln(
+ "}")
+ code.putln(
+ "return v;")
+ code.putln(
+ "}")
+
+ def generate_setattro_function(self, scope, code):
+ # Setting and deleting an attribute are both done through
+ # the setattro method, so we dispatch to user's __setattr__
+ # or __delattr__ or fall back on PyObject_GenericSetAttr.
+ base_type = scope.parent_type.base_type
+ set_entry = scope.lookup_here("__setattr__")
+ del_entry = scope.lookup_here("__delattr__")
+ code.putln("")
+ code.putln(
+ "static int %s(PyObject *o, PyObject *n, PyObject *v) {" %
+ scope.mangle_internal("tp_setattro"))
+ code.putln(
+ "if (v) {")
+ if set_entry:
+ code.putln(
+ "return %s(o, n, v);" %
+ set_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, None, "tp_setattro", "o, n, v", code)
+ code.putln(
+ "return PyObject_GenericSetAttr(o, n, v);")
+ code.putln(
+ "}")
+ code.putln(
+ "else {")
+ if del_entry:
+ code.putln(
+ "return %s(o, n);" %
+ del_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, None, "tp_setattro", "o, n, v", code)
+ code.putln(
+ "return PyObject_GenericSetAttr(o, n, 0);")
+ code.putln(
+ "}")
+ code.putln(
+ "}")
+
+ def generate_descr_get_function(self, scope, code):
+ # The __get__ function of a descriptor object can be
+ # called with NULL for the second or third arguments
+ # under some circumstances, so we replace them with
+ # None in that case.
+ user_get_entry = scope.lookup_here("__get__")
+ code.putln("")
+ code.putln(
+ "static PyObject *%s(PyObject *o, PyObject *i, PyObject *c) {" %
+ scope.mangle_internal("tp_descr_get"))
+ code.putln(
+ "PyObject *r = 0;")
+ code.putln(
+ "if (!i) i = Py_None;")
+ code.putln(
+ "if (!c) c = Py_None;")
+ #code.put_incref("i", py_object_type)
+ #code.put_incref("c", py_object_type)
+ code.putln(
+ "r = %s(o, i, c);" %
+ user_get_entry.func_cname)
+ #code.put_decref("i", py_object_type)
+ #code.put_decref("c", py_object_type)
+ code.putln(
+ "return r;")
+ code.putln(
+ "}")
+
+ def generate_descr_set_function(self, scope, code):
+ # Setting and deleting are both done through the __set__
+ # method of a descriptor, so we dispatch to user's __set__
+ # or __delete__ or raise an exception.
+ base_type = scope.parent_type.base_type
+ user_set_entry = scope.lookup_here("__set__")
+ user_del_entry = scope.lookup_here("__delete__")
+ code.putln("")
+ code.putln(
+ "static int %s(PyObject *o, PyObject *i, PyObject *v) {" %
+ scope.mangle_internal("tp_descr_set"))
+ code.putln(
+ "if (v) {")
+ if user_set_entry:
+ code.putln(
+ "return %s(o, i, v);" %
+ user_set_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, None, "tp_descr_set", "o, i, v", code)
+ code.putln(
+ 'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "else {")
+ if user_del_entry:
+ code.putln(
+ "return %s(o, i);" %
+ user_del_entry.func_cname)
+ else:
+ self.generate_guarded_basetype_call(
+ base_type, None, "tp_descr_set", "o, i, v", code)
+ code.putln(
+ 'PyErr_SetString(PyExc_NotImplementedError, "__delete__");')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "}")
+
+ def generate_property_accessors(self, cclass_scope, code):
+ for entry in cclass_scope.property_entries:
+ property_scope = entry.scope
+ if property_scope.defines_any(["__get__"]):
+ self.generate_property_get_function(entry, code)
+ if property_scope.defines_any(["__set__", "__del__"]):
+ self.generate_property_set_function(entry, code)
+
+ def generate_property_get_function(self, property_entry, code):
+ property_scope = property_entry.scope
+ property_entry.getter_cname = property_scope.parent_scope.mangle(
+ Naming.prop_get_prefix, property_entry.name)
+ get_entry = property_scope.lookup_here("__get__")
+ code.putln("")
+ code.putln(
+ "static PyObject *%s(PyObject *o, void *x) {" %
+ property_entry.getter_cname)
+ code.putln(
+ "return %s(o);" %
+ get_entry.func_cname)
+ code.putln(
+ "}")
+
+ def generate_property_set_function(self, property_entry, code):
+ property_scope = property_entry.scope
+ property_entry.setter_cname = property_scope.parent_scope.mangle(
+ Naming.prop_set_prefix, property_entry.name)
+ set_entry = property_scope.lookup_here("__set__")
+ del_entry = property_scope.lookup_here("__del__")
+ code.putln("")
+ code.putln(
+ "static int %s(PyObject *o, PyObject *v, void *x) {" %
+ property_entry.setter_cname)
+ code.putln(
+ "if (v) {")
+ if set_entry:
+ code.putln(
+ "return %s(o, v);" %
+ set_entry.func_cname)
+ else:
+ code.putln(
+ 'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "else {")
+ if del_entry:
+ code.putln(
+ "return %s(o);" %
+ del_entry.func_cname)
+ else:
+ code.putln(
+ 'PyErr_SetString(PyExc_NotImplementedError, "__del__");')
+ code.putln(
+ "return -1;")
+ code.putln(
+ "}")
+ code.putln(
+ "}")
+
+ def generate_typeobj_definition(self, modname, entry, code):
+ type = entry.type
+ scope = type.scope
+ for suite in TypeSlots.substructures:
+ suite.generate_substructure(scope, code)
+ code.putln("")
+ if entry.visibility == 'public':
+ header = "DL_EXPORT(PyTypeObject) %s = {"
+ else:
+ header = "PyTypeObject %s = {"
+ code.putln(header % type.typeobj_cname)
+ code.putln(
+ "PyObject_HEAD_INIT(0)")
+ code.putln(
+ "0, /*ob_size*/")
+ code.putln(
+ '"%s.%s", /*tp_name*/' % (
+ modname, scope.class_name))
+ if type.typedef_flag:
+ objstruct = type.objstruct_cname
+ else:
+ #objstruct = "struct %s" % scope.parent_type.objstruct_cname
+ objstruct = "struct %s" % type.objstruct_cname
+ code.putln(
+ "sizeof(%s), /*tp_basicsize*/" %
+ objstruct)
+ code.putln(
+ "0, /*tp_itemsize*/")
+ for slot in TypeSlots.slot_table:
+ slot.generate(scope, code)
+ code.putln(
+ "};")
+
+ def generate_method_table(self, env, code):
+ code.putln("")
+ code.putln(
+ "static struct PyMethodDef %s[] = {" %
+ env.method_table_cname)
+ for entry in env.pyfunc_entries:
+ code.put_pymethoddef(entry, ",")
+ code.putln(
+ "{0, 0, 0, 0}")
+ code.putln(
+ "};")
+
+ def generate_member_table(self, env, code):
+ #print "ModuleNode.generate_member_table: scope =", env ###
+ if env.public_attr_entries:
+ code.putln("")
+ code.putln(
+ "static struct PyMemberDef %s[] = {" %
+ env.member_table_cname)
+ type = env.parent_type
+ if type.typedef_flag:
+ objstruct = type.objstruct_cname
+ else:
+ objstruct = "struct %s" % type.objstruct_cname
+ for entry in env.public_attr_entries:
+ type_code = entry.type.pymemberdef_typecode
+ if entry.visibility == 'readonly':
+ flags = "READONLY"
+ else:
+ flags = "0"
+ code.putln('{"%s", %s, %s, %s, 0},' % (
+ entry.name,
+ type_code,
+ "offsetof(%s, %s)" % (objstruct, entry.cname),
+ flags))
+ code.putln(
+ "{0, 0, 0, 0, 0}")
+ code.putln(
+ "};")
+
+ def generate_getset_table(self, env, code):
+ if env.property_entries:
+ code.putln("")
+ code.putln(
+ "static struct PyGetSetDef %s[] = {" %
+ env.getset_table_cname)
+ for entry in env.property_entries:
+ if entry.doc:
+ doc_code = code.get_string_const(entry.doc)
+ else:
+ doc_code = "0"
+ code.putln(
+ '{"%s", %s, %s, %s, 0},' % (
+ entry.name,
+ entry.getter_cname or "0",
+ entry.setter_cname or "0",
+ doc_code))
+ code.putln(
+ "{0, 0, 0, 0, 0}")
+ code.putln(
+ "};")
+
+ def generate_interned_name_table(self, interned_strings, code):
+ code.putln("")
+ code.putln(
+ "static PyObject **%s[] = {" % Naming.intern_tab_cname)
+ for s in interned_strings:
+ code.putln("&%s," % s.py_cname)
+ code.putln("0")
+ code.putln(
+ "};")
+
+ def generate_filename_init_prototype(self, code):
+ code.putln("");
+ code.putln("static void %s(void); /*proto*/" % Naming.fileinit_cname)
+
+ def generate_module_init_func(self, imported_modules, env, code):
+ denv = env.definition_scope
+ code.putln("")
+ header = "PyMODINIT_FUNC init%s(void)" % env.module_name
+ code.putln("%s; /*proto*/" % header)
+ code.putln("%s {" % header)
+ code.put_var_declarations(env.temp_entries)
+
+ if env.gil_used:
+ # Workaround for GIL/threading bug in 2.3
+ code.putln("#if PY_VERSION_HEX < 0x02040000 && defined(WITH_THREAD)")
+ code.putln(" PyEval_InitThreads();")
+ code.putln("#endif")
+
+ #code.putln("/*--- Libary function declarations ---*/")
+ env.generate_library_function_declarations(code)
+ self.generate_filename_init_call(code)
+
+ #code.putln("/*--- Module creation code ---*/")
+ self.generate_module_creation_code(env, code)
+
+ #code.putln("/*--- String init code ---*/")
+ self.generate_string_init_code(env, code)
+
+ #code.putln("/*--- Intern code ---*/")
+ #self.generate_intern_code(env, code)
+
+ #code.putln("/*--- Global init code ---*/")
+ self.generate_global_init_code(env, code)
+
+ #code.putln("/*--- Function export code ---*/")
+ self.generate_pxd_function_export_code(env, code)
+ self.generate_api_function_export_code(env, code)
+
+ #code.putln("/*--- Function import code ---*/")
+ for module in imported_modules:
+ self.generate_c_function_import_code_for_module(module, env, code)
+
+ #code.putln("/*--- Type init code ---*/")
+ self.generate_type_init_code(env, code)
+
+ #code.putln("/*--- Type import code ---*/")
+ for module in imported_modules:
+ self.generate_type_import_code_for_module(module, env, code)
+
+ #code.putln("/*--- Execution code ---*/")
+ self.body.generate_execution_code(code)
+ code.putln("return;")
+ code.put_label(code.error_label)
+ code.put_var_xdecrefs(env.temp_entries)
+ code.putln('__Pyx_AddTraceback("%s");' % (env.qualified_name))
+ code.use_utility_code(Nodes.traceback_utility_code)
+ code.putln('}')
+
+ def generate_filename_init_call(self, code):
+ code.putln("%s();" % Naming.fileinit_cname)
+
+ def generate_module_creation_code(self, env, code):
+ # Generate code to create the module object and
+ # install the builtins.
+ if env.doc:
+ doc = env.doc_cname
+ else:
+ doc = "0"
+ code.putln(
+ '%s = Py_InitModule4("%s", %s, %s, 0, PYTHON_API_VERSION);' % (
+ env.module_cname,
+ env.module_name,
+ env.method_table_cname,
+ doc))
+ code.putln(
+ "if (!%s) %s;" % (
+ env.module_cname,
+ code.error_goto(self.pos)))
+ code.putln(
+ "Py_INCREF(%s);" %
+ env.module_cname)
+ code.putln(
+ '%s = PyImport_AddModule("__builtin__");' %
+ Naming.builtins_cname)
+ code.putln(
+ "if (!%s) %s;" % (
+ Naming.builtins_cname,
+ code.error_goto(self.pos)))
+ code.putln(
+ 'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % (
+ env.module_cname,
+ Naming.builtins_cname,
+ code.error_goto(self.pos)))
+
+ def generate_string_init_code(self, env, code):
+ code.use_utility_code(Nodes.init_string_tab_utility_code)
+ code.putln(
+ "if (__Pyx_InitStrings(%s) < 0) %s;" % (
+ Naming.stringtab_cname,
+ code.error_goto(self.pos)))
+
+ def generate_global_init_code(self, env, code):
+ # Generate code to initialise global PyObject *
+ # variables to None.
+ for entry in env.var_entries:
+ if entry.visibility <> 'extern':
+ if entry.type.is_pyobject:
+ code.put_init_var_to_py_none(entry)
+
+ def generate_pxd_function_export_code(self, env, code):
+ denv = env.definition_scope
+ for entry in denv.cfunc_entries:
+ if entry.visibility <> 'extern':
+ self.generate_c_function_export_code(env, entry, code)
+
+ def generate_api_function_export_code(self, env, code):
+ for entry in env.cfunc_entries:
+ if entry.api:
+ self.generate_c_function_export_code(env, entry, code)
+
+ def generate_c_function_export_code(self, env, entry, code):
+ code.use_utility_code(function_export_utility_code)
+ signature = entry.type.signature_string()
+ code.putln('if (__Pyx_ExportFunction("%s", (void*)%s, "%s") < 0) %s' % (
+ entry.name,
+ entry.cname,
+ signature,
+ code.error_goto(self.pos)))
+
+ def generate_type_import_code_for_module(self, module, env, code):
+ # Generate type import code for all exported extension types in
+ # an imported module.
+ #if module.c_class_entries:
+ #print "generate_type_import_code_for_module:", module ###
+ for entry in module.c_class_entries:
+ if entry.defined_in_pxd:
+ self.generate_type_import_code(env, entry.type, entry.pos, code)
+
+ def generate_c_function_import_code_for_module(self, module, env, code):
+ # Generate import code for all exported C functions in a cimported module.
+ entries = []
+ for entry in module.cfunc_entries:
+ if entry.defined_in_pxd:
+ entries.append(entry)
+ if entries:
+ code.use_utility_code(import_module_utility_code)
+ code.use_utility_code(function_import_utility_code)
+ temp = self.module_temp_cname
+ code.putln(
+ '%s = __Pyx_ImportModule("%s"); if (!%s) %s' % (
+ temp,
+ module.qualified_name,
+ temp,
+ code.error_goto(self.pos)))
+ for entry in entries:
+ code.putln(
+ 'if (__Pyx_ImportFunction(%s, "%s", (void**)&%s, "%s") < 0) %s' % (
+ temp,
+ entry.name,
+ entry.cname,
+ entry.type.signature_string(),
+ code.error_goto(self.pos)))
+ code.putln("Py_DECREF(%s); %s = 0;" % (temp, temp))
+
+ def generate_type_init_code(self, env, code):
+ # Generate type import code for extern extension types
+ # and type ready code for non-extern ones.
+ #print "generate_type_init_code:", env ###
+ denv = env.definition_scope
+ for entry in denv.c_class_entries + env.c_class_entries:
+ if entry.visibility == 'extern':
+ self.generate_type_import_code(env, entry.type, entry.pos, code)
+ else:
+ self.generate_base_type_import_code(env, entry, code)
+ self.generate_exttype_vtable_init_code(entry, code)
+ self.generate_type_ready_code(env, entry, code)
+ self.generate_typeptr_assignment_code(entry, code)
+
+ def generate_base_type_import_code(self, env, entry, code):
+ base_type = entry.type.base_type
+ if base_type and base_type.module_name <> env.qualified_name:
+ self.generate_type_import_code(env, base_type, self.pos, code)
+
+ def use_type_import_utility_code(self, code):
+ import ExprNodes
+ code.use_utility_code(type_import_utility_code)
+ code.use_utility_code(import_module_utility_code)
+
+ def generate_type_import_code(self, env, type, pos, code):
+ # If not already done, generate code to import the typeobject of an
+ # extension type defined in another module, and extract its C method
+ # table pointer if any.
+ #print "generate_type_import_code:", type ###
+ if not type.is_builtin and type not in env.types_imported:
+ if type.typedef_flag:
+ objstruct = type.objstruct_cname
+ else:
+ objstruct = "struct %s" % type.objstruct_cname
+ self.generate_type_import_call(type, code, code.error_goto(pos))
+ self.use_type_import_utility_code(code)
+ if type.vtabptr_cname:
+ code.putln(
+ "if (__Pyx_GetVtable(%s->tp_dict, &%s) < 0) %s" % (
+ type.typeptr_cname,
+ type.vtabptr_cname,
+ code.error_goto(pos)))
+ code.use_utility_code(Nodes.get_vtable_utility_code)
+ env.types_imported[type] = 1
+
+ def generate_type_import_call(self, type, code, error_code):
+ if type.typedef_flag:
+ objstruct = type.objstruct_cname
+ else:
+ objstruct = "struct %s" % type.objstruct_cname
+ code.putln('%s = __Pyx_ImportType("%s", "%s", sizeof(%s)); if (!%s) %s' % (
+ type.typeptr_cname,
+ type.module_name,
+ type.name,
+ objstruct,
+ type.typeptr_cname,
+ error_code))
+
+ def generate_type_ready_code(self, env, entry, code):
+ # Generate a call to PyType_Ready for an extension
+ # type defined in this module.
+ type = entry.type
+ typeobj_cname = type.typeobj_cname
+ scope = type.scope
+ if scope: # could be None if there was an error
+ if entry.visibility <> 'extern':
+ for slot in TypeSlots.slot_table:
+ slot.generate_dynamic_init_code(scope, code)
+ code.putln(
+ "if (PyType_Ready(&%s) < 0) %s" % (
+ typeobj_cname,
+ code.error_goto(entry.pos)))
+ if type.vtable_cname:
+ code.putln(
+ "if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s" % (
+ typeobj_cname,
+ type.vtabptr_cname,
+ code.error_goto(entry.pos)))
+ code.use_utility_code(Nodes.set_vtable_utility_code)
+ code.putln(
+ 'if (PyObject_SetAttrString(%s, "%s", (PyObject *)&%s) < 0) %s' % (
+ Naming.module_cname,
+ scope.class_name,
+ typeobj_cname,
+ code.error_goto(entry.pos)))
+ weakref_entry = scope.lookup_here("__weakref__")
+ if weakref_entry:
+ if weakref_entry.type is py_object_type:
+ tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
+ code.putln("if (%s == 0) %s = offsetof(struct %s, %s);" % (
+ tp_weaklistoffset,
+ tp_weaklistoffset,
+ type.objstruct_cname,
+ weakref_entry.cname))
+ else:
+ error(weakref_entry.pos, "__weakref__ slot must be of type 'object'")
+
+ def generate_exttype_vtable_init_code(self, entry, code):
+ # Generate code to initialise the C method table of an
+ # extension type.
+ type = entry.type
+ if type.vtable_cname:
+ code.putln(
+ "%s = &%s;" % (
+ type.vtabptr_cname,
+ type.vtable_cname))
+ if type.base_type and type.base_type.vtabptr_cname:
+ code.putln(
+ "%s.%s = *%s;" % (
+ type.vtable_cname,
+ Naming.obj_base_cname,
+ type.base_type.vtabptr_cname))
+ for meth_entry in type.scope.cfunc_entries:
+ if meth_entry.func_cname:
+ code.putln(
+ "*(void(**)(void))&%s.%s = (void(*)(void))%s;" % (
+ type.vtable_cname,
+ meth_entry.cname,
+ meth_entry.func_cname))
+
+ def generate_typeptr_assignment_code(self, entry, code):
+ # Generate code to initialise the typeptr of an extension
+ # type defined in this module to point to its type object.
+ type = entry.type
+ if type.typeobj_cname:
+ code.putln(
+ "%s = &%s;" % (
+ type.typeptr_cname, type.typeobj_cname))
+
+ def generate_utility_functions(self, code):
+ code.global_state.generate_utility_functions(code)
+
+#------------------------------------------------------------------------------------
+#
+# Runtime support code
+#
+#------------------------------------------------------------------------------------
+
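+# Each utility code value below is a two-element list: element [0] is the
+# forward declaration (the /*proto*/ line) and element [1] is the
+# implementation; for example, generate_api_code() above writes
+# import_module_utility_code[1] into the generated _api.h header.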
+import_module_utility_code = [
+"""
+static PyObject *__Pyx_ImportModule(char *name); /*proto*/
+""","""
+#ifndef __PYX_HAVE_RT_ImportModule
+#define __PYX_HAVE_RT_ImportModule
+static PyObject *__Pyx_ImportModule(char *name) {
+ PyObject *py_name = 0;
+
+ py_name = PyString_FromString(name);
+ if (!py_name)
+ goto bad;
+ return PyImport_Import(py_name);
+bad:
+ Py_XDECREF(py_name);
+ return 0;
+}
+#endif
+"""]
+
+#------------------------------------------------------------------------------------
+
+type_import_utility_code = [
+"""
+static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size); /*proto*/
+""",r"""
+#ifndef __PYX_HAVE_RT_ImportType
+#define __PYX_HAVE_RT_ImportType
+static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name,
+ long size)
+{
+ PyObject *py_module = 0;
+ PyObject *result = 0;
+
+ py_module = __Pyx_ImportModule(module_name);
+ if (!py_module)
+ goto bad;
+ result = PyObject_GetAttrString(py_module, class_name);
+ if (!result)
+ goto bad;
+ if (!PyType_Check(result)) {
+ PyErr_Format(PyExc_TypeError,
+ "%s.%s is not a type object",
+ module_name, class_name);
+ goto bad;
+ }
+#ifdef __PYX_CHECK_IMPORTED_TYPES
+ if (((PyTypeObject *)result)->tp_basicsize != size) {
+ PyErr_Format(PyExc_ValueError,
+ "%s.%s does not appear to be the correct type object",
+ module_name, class_name);
+ goto bad;
+ }
+#endif
+ return (PyTypeObject *)result;
+bad:
+ Py_XDECREF(result);
+ return 0;
+}
+#endif
+"""]
+
+#------------------------------------------------------------------------------------
+
+function_export_utility_code = [
+"""
+static int __Pyx_ExportFunction(char *n, void *f, char *s); /*proto*/
+""",r"""
+static int __Pyx_ExportFunction(char *n, void *f, char *s) {
+ PyObject *d = 0;
+ PyObject *p = 0;
+ d = PyObject_GetAttrString(%(MODULE)s, "%(API)s");
+ if (!d) {
+ PyErr_Clear();
+ d = PyDict_New();
+ if (!d)
+ goto bad;
+ Py_INCREF(d);
+ if (PyModule_AddObject(%(MODULE)s, "%(API)s", d) < 0)
+ goto bad;
+ }
+ p = PyCObject_FromVoidPtrAndDesc(f, s, 0);
+ if (!p)
+ goto bad;
+ if (PyDict_SetItemString(d, n, p) < 0)
+ goto bad;
+ Py_DECREF(p);
+ Py_DECREF(d);
+ return 0;
+bad:
+ Py_XDECREF(p);
+ Py_XDECREF(d);
+ return -1;
+}
+""" % {'MODULE': Naming.module_cname, 'API': Naming.api_name}]
+
+#------------------------------------------------------------------------------------
+
+function_import_utility_code = [
+"""
+static int __Pyx_ImportFunction(PyObject *module, char *funcname, void **f, char *sig); /*proto*/
+""","""
+#ifndef __PYX_HAVE_RT_ImportFunction
+#define __PYX_HAVE_RT_ImportFunction
+static int __Pyx_ImportFunction(PyObject *module, char *funcname, void **f, char *sig) {
+ PyObject *d = 0;
+ PyObject *cobj = 0;
+ char *desc;
+
+ d = PyObject_GetAttrString(module, "%(API)s");
+ if (!d)
+ goto bad;
+ cobj = PyDict_GetItemString(d, funcname);
+ if (!cobj) {
+ PyErr_Format(PyExc_ImportError,
+ "%%s does not export expected C function %%s",
+ PyModule_GetName(module), funcname);
+ goto bad;
+ }
+ desc = (char *)PyCObject_GetDesc(cobj);
+ if (!desc)
+ goto bad;
+ if (strcmp(desc, sig) != 0) {
+ PyErr_Format(PyExc_TypeError,
+ "C function %%s.%%s has wrong signature (expected %%s, got %%s)",
+ PyModule_GetName(module), funcname, sig, desc);
+ goto bad;
+ }
+ *f = PyCObject_AsVoidPtr(cobj);
+ Py_DECREF(d);
+ return 0;
+bad:
+ Py_XDECREF(d);
+ return -1;
+}
+#endif
+""" % dict(API = Naming.api_name)]
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Naming.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Naming.py
new file mode 100644
index 00000000..61076a81
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Naming.py
@@ -0,0 +1,69 @@
+#
+# Pyrex - C naming conventions
+#
+#
+# Prefixes for generating C names.
+# Collected here to make it easier to ensure uniqueness.
+#
+
+pyrex_prefix = "__pyx_"
+
+arg_prefix = pyrex_prefix + "arg_"
+funcdoc_prefix = pyrex_prefix + "doc_"
+enum_prefix = pyrex_prefix + "e_"
+func_prefix = pyrex_prefix + "f_"
+gstab_prefix = pyrex_prefix + "getsets_"
+prop_get_prefix = pyrex_prefix + "getprop_"
+const_prefix = pyrex_prefix + "k"
+label_prefix = pyrex_prefix + "L"
+pymethdef_prefix = pyrex_prefix + "mdef_"
+methtab_prefix = pyrex_prefix + "methods_"
+memtab_prefix = pyrex_prefix + "members_"
+interned_prefix = pyrex_prefix + "n_"
+objstruct_prefix = pyrex_prefix + "obj_"
+typeptr_prefix = pyrex_prefix + "ptype_"
+prop_set_prefix = pyrex_prefix + "setprop_"
+type_prefix = pyrex_prefix + "t_"
+typeobj_prefix = pyrex_prefix + "type_"
+var_prefix = pyrex_prefix + "v_"
+vtable_prefix = pyrex_prefix + "vtable_"
+vtabptr_prefix = pyrex_prefix + "vtabptr_"
+vtabstruct_prefix = pyrex_prefix + "vtabstruct_"
+
+args_cname = pyrex_prefix + "args"
+kwdlist_cname = pyrex_prefix + "argnames"
+obj_base_cname = pyrex_prefix + "base"
+builtins_cname = pyrex_prefix + "b"
+moddict_cname = pyrex_prefix + "d"
+default_prefix = pyrex_prefix + "d"
+dummy_cname = pyrex_prefix + "dummy"
+filename_cname = pyrex_prefix + "filename"
+filetable_cname = pyrex_prefix + "f"
+filenames_cname = pyrex_prefix + "filenames"
+fileinit_cname = pyrex_prefix + "init_filenames"
+intern_tab_cname = pyrex_prefix + "intern_tab"
+kwds_cname = pyrex_prefix + "kwds"
+lineno_cname = pyrex_prefix + "lineno"
+module_cname = pyrex_prefix + "m"
+moddoc_cname = pyrex_prefix + "mdoc"
+methtable_cname = pyrex_prefix + "methods"
+retval_cname = pyrex_prefix + "r"
+reqd_kwds_cname = pyrex_prefix + "reqd_kwds"
+self_cname = pyrex_prefix + "self"
+stringtab_cname = pyrex_prefix + "string_tab"
+vtabslot_cname = pyrex_prefix + "vtab"
+
+extern_c_macro = pyrex_prefix.upper() + "EXTERN_C"
+
+exc_type_name = pyrex_prefix + "exc_type"
+exc_value_name = pyrex_prefix + "exc_value"
+exc_tb_name = pyrex_prefix + "exc_tb"
+exc_lineno_name = pyrex_prefix + "exc_lineno"
+
+exc_vars = (exc_type_name, exc_value_name, exc_tb_name)
+
+api_name = pyrex_prefix + "capi__"
+
+h_guard_prefix = "__PYX_HAVE__"
+api_guard_prefix = "__PYX_HAVE_API__"
+api_func_guard = "__PYX_HAVE_API_FUNC_"
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Nodes.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Nodes.py
new file mode 100644
index 00000000..fb974df0
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Nodes.py
@@ -0,0 +1,3249 @@
+#
+# Pyrex - Parse tree nodes
+#
+
+import string, sys
+
+import Code
+from Errors import error, one_time_warning, InternalError
+import Naming
+import PyrexTypes
+from PyrexTypes import py_object_type, c_int_type, error_type, \
+ CTypedefType, CFuncType
+from Symtab import ModuleScope, LocalScope, \
+ StructOrUnionScope, PyClassScope, CClassScope
+from Pyrex.Utils import open_new_file, replace_suffix
+import Options
+
+from DebugFlags import debug_disposal_code
+
+class Node:
+ # pos (string, int, int) Source file position
+ # is_name boolean Is a NameNode
+ # is_literal boolean Is a ConstNode
+
+ is_name = 0
+ is_literal = 0
+
+ def __init__(self, pos, **kw):
+ self.pos = pos
+ self.__dict__.update(kw)
+
+ gil_message = "Operation"
+
+ def gil_check(self, env):
+ if env.nogil:
+ self.gil_error()
+
+ def gil_error(self, message = None):
+ error(self.pos, "%s not allowed without gil" % (message or self.gil_message))
+
+ #
+ # There are 3 phases of parse tree processing, applied in order to
+ # all the statements in a given scope-block:
+ #
+ # (1) analyse_declarations
+ # Make symbol table entries for all declarations at the current
+ # level, both explicit (def, cdef, etc.) and implicit (assignment
+ # to an otherwise undeclared name).
+ #
+ # (2) analyse_expressions
+ # Determine the result types of expressions and fill in the
+ # 'type' attribute of each ExprNode. Insert coercion nodes into the
+ # tree where needed to convert to and from Python objects.
+ # Allocate temporary locals for intermediate results.
+ #
+ # (3) generate_code
+ # Emit C code for all declarations, statements and expressions.
+ # Recursively applies the 3 processing phases to the bodies of
+ # functions.
+ #
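+    # A minimal sketch of a node following these phases (hypothetical, for
+    # illustration only):
+    #
+    #     class NoOpNode(Node):
+    #         def analyse_declarations(self, env):
+    #             pass                    # (1) nothing to declare
+    #         def analyse_expressions(self, env):
+    #             pass                    # (2) no expressions to type
+    #         def generate_code(self, code):
+    #             pass                    # (3) no C code to emit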
+
+ def analyse_declarations(self, env):
+ pass
+
+ def analyse_expressions(self, env):
+ raise InternalError("analyse_expressions not implemented for %s" % \
+ self.__class__.__name__)
+
+ def generate_code(self, code):
+ raise InternalError("generate_code not implemented for %s" % \
+ self.__class__.__name__)
+
+
+class BlockNode:
+ # Mixin class for nodes representing a declaration block.
+ pass
+
+# def generate_const_definitions(self, env, code):
+# if env.const_entries:
+# code.putln("")
+# for entry in env.const_entries:
+# if not entry.is_interned:
+# code.put_var_declaration(entry, static = 1)
+
+# def generate_interned_name_decls(self, env, code):
+# # Flush accumulated interned names from the global scope
+# # and generate declarations for them.
+# genv = env.global_scope()
+# intern_map = genv.intern_map
+# names = genv.interned_names
+# if names:
+# code.putln("")
+# for name in names:
+# code.putln(
+# "static PyObject *%s;" % intern_map[name])
+# del names[:]
+
+# def generate_py_string_decls(self, env, code):
+# entries = env.pystring_entries
+# if entries:
+# code.putln("")
+# for entry in entries:
+# code.putln(
+# "static PyObject *%s;" % entry.pystring_cname)
+
+
+class StatListNode(Node):
+ # stats a list of StatNode
+
+ def analyse_declarations(self, env):
+ #print "StatListNode.analyse_declarations" ###
+ for stat in self.stats:
+ stat.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ #print "StatListNode.analyse_expressions" ###
+ for stat in self.stats:
+ stat.analyse_expressions(env)
+
+ def generate_function_definitions(self, env, code):
+ #print "StatListNode.generate_function_definitions" ###
+ for stat in self.stats:
+ stat.generate_function_definitions(env, code)
+
+ def generate_execution_code(self, code):
+ #print "StatListNode.generate_execution_code" ###
+ for stat in self.stats:
+ code.mark_pos(stat.pos)
+ stat.generate_execution_code(code)
+
+
+class StatNode(Node):
+ #
+ # Code generation for statements is split into the following subphases:
+ #
+ # (1) generate_function_definitions
+ # Emit C code for the definitions of any structs,
+ # unions, enums and functions defined in the current
+ # scope-block.
+ #
+ # (2) generate_execution_code
+ # Emit C code for executable statements.
+ #
+
+ def generate_function_definitions(self, env, code):
+ pass
+
+ def generate_execution_code(self, code):
+ raise InternalError("generate_execution_code not implemented for %s" % \
+ self.__class__.__name__)
+
+
+class CDefExternNode(StatNode):
+ # include_file string or None
+ # body StatNode
+
+ def analyse_declarations(self, env):
+ if self.include_file:
+ env.add_include_file(self.include_file)
+ old_cinclude_flag = env.in_cinclude
+ env.in_cinclude = 1
+ self.body.analyse_declarations(env)
+ env.in_cinclude = old_cinclude_flag
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class CDeclaratorNode(Node):
+ # Part of a C declaration.
+ #
+ # Processing during analyse_declarations phase:
+ #
+ # analyse
+ # Returns (name, type) pair where name is the
+ # CNameDeclaratorNode of the name being declared
+ # and type is the type it is being declared as.
+ #
+ # calling_convention string Calling convention of CFuncDeclaratorNode
+ # for which this is a base
+
+ calling_convention = ""
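+    # For illustration (hypothetical declaration): analysing the declarator of
+    # "cdef int *p" recurses CPtrDeclaratorNode -> CNameDeclaratorNode, wrapping
+    # the base type on the way down, and returns the pair
+    #
+    #     (<CNameDeclaratorNode for "p">, <C type int *>)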
+
+
+class CNameDeclaratorNode(CDeclaratorNode):
+ # name string The Pyrex name being declared
+ # cname string or None C name, if specified
+
+ def analyse(self, base_type, env):
+ return self, base_type
+
+
+class CPtrDeclaratorNode(CDeclaratorNode):
+ # base CDeclaratorNode
+
+ def analyse(self, base_type, env):
+ if base_type.is_pyobject:
+ error(self.pos,
+ "Pointer base type cannot be a Python object")
+ ptr_type = PyrexTypes.c_ptr_type(base_type)
+ return self.base.analyse(ptr_type, env)
+
+
+class CArrayDeclaratorNode(CDeclaratorNode):
+ # base CDeclaratorNode
+ # dimension ExprNode
+
+ def analyse(self, base_type, env):
+ if self.dimension:
+ self.dimension.analyse_const_expression(env)
+ if not self.dimension.type.is_int:
+ error(self.dimension.pos, "Array dimension not integer")
+ size = self.dimension.result()
+ else:
+ size = None
+ if not base_type.is_complete():
+ error(self.pos,
+ "Array element type '%s' is incomplete" % base_type)
+ if base_type.is_pyobject:
+ error(self.pos,
+ "Array element cannot be a Python object")
+ if base_type.is_cfunction:
+ error(self.pos,
+ "Array element cannot be a function")
+ array_type = PyrexTypes.c_array_type(base_type, size)
+ return self.base.analyse(array_type, env)
+
+
+class CFuncDeclaratorNode(CDeclaratorNode):
+ # base CDeclaratorNode
+ # args [CArgDeclNode]
+ # has_varargs boolean
+ # exception_value ConstNode
+ # exception_check boolean True if PyErr_Occurred check needed
+ # nogil boolean Can be called without gil
+ # with_gil boolean Acquire gil around function body
+
+ def analyse(self, return_type, env):
+ func_type_args = []
+ for arg_node in self.args:
+ name_declarator, type = arg_node.analyse(env)
+ name = name_declarator.name
+ if name_declarator.cname:
+ error(self.pos,
+ "Function argument cannot have C name specification")
+ # Turn *[] argument into **
+ if type.is_array:
+ type = PyrexTypes.c_ptr_type(type.base_type)
+ # Catch attempted C-style func(void) decl
+ if type.is_void:
+ error(arg_node.pos, "Function argument cannot be void")
+ func_type_args.append(
+ PyrexTypes.CFuncTypeArg(name, type, arg_node.pos))
+ if arg_node.default:
+ error(arg_node.pos, "C function argument cannot have default value")
+ exc_val = None
+ exc_check = 0
+ if return_type.is_pyobject \
+ and (self.exception_value or self.exception_check):
+ error(self.pos,
+ "Exception clause not allowed for function returning Python object")
+ else:
+ if self.exception_value:
+ self.exception_value.analyse_const_expression(env)
+ exc_val = self.exception_value.result()
+ if not return_type.assignable_from(self.exception_value.type):
+ error(self.exception_value.pos,
+ "Exception value incompatible with function return type")
+ exc_check = self.exception_check
+ if return_type.is_array:
+ error(self.pos,
+ "Function cannot return an array")
+ if return_type.is_cfunction:
+ error(self.pos,
+ "Function cannot return a function")
+ func_type = PyrexTypes.CFuncType(
+ return_type, func_type_args, self.has_varargs,
+ exception_value = exc_val, exception_check = exc_check,
+ calling_convention = self.base.calling_convention,
+ nogil = self.nogil, with_gil = self.with_gil)
+ return self.base.analyse(func_type, env)
+
+
+class CArgDeclNode(Node):
+ # Item in a function declaration argument list.
+ #
+ # base_type CBaseTypeNode
+ # declarator CDeclaratorNode
+ # #not_none boolean Tagged with 'not None'
+ # allow_none tristate True == 'or None', False == 'not None', None == unspecified
+ # default ExprNode or None
+ # default_entry Symtab.Entry Entry for the variable holding the default value
+ # is_self_arg boolean Is the "self" arg of an extension type method
+ # is_kw_only boolean Is a keyword-only argument
+
+ is_self_arg = 0
+
+ def analyse(self, env):
+ #print "CArgDeclNode.analyse: is_self_arg =", self.is_self_arg ###
+ base_type = self.base_type.analyse(env)
+ return self.declarator.analyse(base_type, env)
+
+
+class CBaseTypeNode(Node):
+ # Abstract base class for C base type nodes.
+ #
+ # Processing during analyse_declarations phase:
+ #
+ # analyse
+ # Returns the type.
+
+ pass
+
+
+class CSimpleBaseTypeNode(CBaseTypeNode):
+ # name string
+ # module_path [string] Qualifying name components
+ # is_basic_c_type boolean
+ # signed boolean
+ # longness integer
+ # is_self_arg boolean Is self argument of C method
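+ #
+ # For example, "cdef unsigned long n" resolves through
+ # PyrexTypes.simple_c_type(); a bare "object" gives py_object_type;
+ # an untyped self argument of a C method defaults to the enclosing
+ # extension type; and "cdef spam.SpamType s" looks SpamType up in
+ # the cimported module spam (the names here are only illustrative).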
+
+ def analyse(self, env):
+ # Return type descriptor.
+ #print "CSimpleBaseTypeNode.analyse: is_self_arg =", self.is_self_arg ###
+ type = None
+ if self.is_basic_c_type:
+ type = PyrexTypes.simple_c_type(self.signed, self.longness, self.name)
+ if not type:
+ error(self.pos, "Unrecognised type modifier combination")
+ elif self.name == "object" and not self.module_path:
+ type = py_object_type
+ elif self.name is None:
+ if self.is_self_arg and env.is_c_class_scope:
+ #print "CSimpleBaseTypeNode.analyse: defaulting to parent type" ###
+ type = env.parent_type
+ else:
+ type = py_object_type
+ else:
+ scope = env.find_imported_module(self.module_path, self.pos)
+ if scope:
+ entry = scope.find(self.name, self.pos)
+ if entry and entry.is_type:
+ type = entry.type
+ else:
+ error(self.pos, "'%s' is not a type identifier" % self.name)
+ if type:
+ return type
+ else:
+ return PyrexTypes.error_type
+
+
+class CComplexBaseTypeNode(CBaseTypeNode):
+ # base_type CBaseTypeNode
+ # declarator CDeclaratorNode
+
+ def analyse(self, env):
+ base = self.base_type.analyse(env)
+ _, type = self.declarator.analyse(base, env)
+ return type
+
+
+class CVarDefNode(StatNode):
+ # C variable definition or forward/extern function declaration.
+ #
+ # visibility 'private' or 'public' or 'extern'
+ # base_type CBaseTypeNode
+ # declarators [CDeclaratorNode]
+ # in_pxd boolean
+ # api boolean
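+ #
+ # Covers, for example, "cdef public int spam" at module level, as
+ # well as function declarations such as "int eggs(char *s)" inside
+ # a "cdef extern from" block, which are recorded via
+ # declare_cfunction() below with visibility 'extern'.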
+
+ def analyse_declarations(self, env, dest_scope = None):
+ if not dest_scope:
+ dest_scope = env
+ base_type = self.base_type.analyse(env)
+ for declarator in self.declarators:
+ name_declarator, type = declarator.analyse(base_type, env)
+ if not type.is_complete():
+ if not (self.visibility == 'extern' and type.is_array):
+ error(declarator.pos,
+ "Variable type '%s' is incomplete" % type)
+ if self.visibility == 'extern' and type.is_pyobject:
+ error(declarator.pos,
+ "Python object cannot be declared extern")
+ name = name_declarator.name
+ cname = name_declarator.cname
+ if type.is_cfunction:
+ entry = dest_scope.declare_cfunction(name, type, declarator.pos,
+ cname = cname, visibility = self.visibility, in_pxd = self.in_pxd,
+ api = self.api)
+ else:
+ if self.in_pxd and self.visibility <> 'extern':
+ error(self.pos,
+ "Only 'extern' C variable declaration allowed in .pxd file")
+ dest_scope.declare_var(name, type, declarator.pos,
+ cname = cname, visibility = self.visibility, is_cdef = 1)
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class CStructOrUnionDefNode(StatNode):
+ # name string
+ # cname string or None
+ # module_path [string]
+ # kind "struct" or "union"
+ # typedef_flag boolean
+ # cplus_flag boolean
+ # visibility "public" or "private"
+ # in_pxd boolean
+ # attributes [CVarDefNode] or None
+ # entry Entry
+ # bases [([name, ...], name), ...]
+
+ def analyse_declarations(self, env):
+ scope = None
+ base_scopes = []
+ for base in self.bases:
+ base_entry = env.find_qualified_name(base, self.pos)
+ if base_entry:
+ if base_entry.is_type and base_entry.type.is_struct_or_union \
+ and base_entry.type.scope.is_cplus:
+ base_scopes.append(base_entry.type.scope)
+ else:
+ error(self.pos, "Base type '%s' is not a C++ struct" %
+ ".".join(base[0] + [base[1]]))
+ if self.attributes is not None:
+ scope = StructOrUnionScope(base_scopes = base_scopes, is_cplus = self.cplus_flag)
+ if self.module_path:
+ home_scope = env.find_imported_module(self.module_path, self.pos)
+ if not home_scope:
+ return
+ else:
+ home_scope = env
+ def declare():
+ self.entry = home_scope.declare_struct_or_union(
+ self.name, self.kind, scope, self.typedef_flag, self.pos,
+ self.cname, visibility = self.visibility)
+ if self.attributes is not None:
+ if self.in_pxd and not env.in_cinclude:
+ self.entry.defined_in_pxd = 1
+ if not self.typedef_flag:
+ declare()
+ if self.attributes is not None:
+ for attr in self.attributes:
+ attr.analyse_declarations(env, scope)
+ if self.typedef_flag:
+ declare()
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class CEnumDefNode(StatNode):
+ # name string or None
+ # cname string or None
+ # items [CEnumDefItemNode]
+ # typedef_flag boolean
+ # visibility "public" or "private"
+ # in_pxd boolean
+ # entry Entry
+
+ def analyse_declarations(self, env):
+ self.entry = env.declare_enum(self.name, self.pos,
+ cname = self.cname, typedef_flag = self.typedef_flag,
+ visibility = self.visibility)
+ if self.items is not None:
+ if self.in_pxd and not env.in_cinclude:
+ self.entry.defined_in_pxd = 1
+ for item in self.items:
+ item.analyse_declarations(env, self.entry)
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class CEnumDefItemNode(StatNode):
+ # name string
+ # cname string or None
+ # value ExprNode or None
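+ #
+ # An item declared without "= value" in a "cdef enum" block has no
+ # value node; analyse_declarations() below then uses the item's own
+ # name as its value, so the generated C code refers to the C
+ # enumerator itself rather than a numeric literal.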
+
+ def analyse_declarations(self, env, enum_entry):
+ value_node = self.value
+ if value_node:
+ value_node.analyse_const_expression(env)
+ type = value_node.type
+ if type.is_int or type.is_enum:
+ value = value_node.result()
+ else:
+ error(self.pos,
+ "Type '%s' is not a valid enum value" % type)
+ value = "<error>"
+ else:
+ value = self.name
+ entry = env.declare_const(self.name, enum_entry.type,
+ value, self.pos, cname = self.cname)
+ enum_entry.enum_values.append(entry)
+
+
+class CTypeDefNode(StatNode):
+ # base_type CBaseTypeNode
+ # declarator CDeclaratorNode
+ # visibility "public" or "private"
+ # in_pxd boolean
+
+ def analyse_declarations(self, env):
+ base = self.base_type.analyse(env)
+ name_declarator, type = self.declarator.analyse(base, env)
+ name = name_declarator.name
+ cname = name_declarator.cname
+ entry = env.declare_typedef(name, type, self.pos,
+ cname = cname, visibility = self.visibility)
+ if self.in_pxd and not env.in_cinclude:
+ entry.defined_in_pxd = 1
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class FuncDefNode(StatNode, BlockNode):
+ # Base class for function definition nodes.
+ #
+ # return_type PyrexType
+ # #filename string C name of filename string const
+ # entry Symtab.Entry
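+ #
+ # generate_function_definitions() below emits roughly this C
+ # skeleton (an illustrative sketch; real identifiers come from
+ # Naming and the entry):
+ #
+ # static <return type> <cname>(<args>) {
+ # <declarations> <optional PyGILState_Ensure()>
+ # <argument parsing, increfs, conversions, type tests>
+ # <function body> <set default return value>
+ # <error label>: xdecref temps, traceback or unraisable, error value
+ # <return label>: decref locals/args, optional PyGILState_Release()
+ # return <retval>;
+ # }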
+
+ def analyse_expressions(self, env):
+ pass
+
+ def need_gil_acquisition(self, lenv):
+ return 0
+
+ def generate_function_definitions(self, env, code):
+ # Generate C code for header and body of function
+ genv = env.global_scope()
+ lenv = LocalScope(name = self.entry.name, outer_scope = genv)
+ lenv.return_type = self.return_type
+ type = self.entry.type
+ if type.is_cfunction:
+ lenv.nogil = type.nogil and not type.with_gil
+ code.init_labels()
+ self.declare_arguments(lenv)
+ self.body.analyse_declarations(lenv)
+ self.body.analyse_expressions(lenv)
+ # Code for nested function definitions would go here
+ # if we supported them, which we probably won't.
+ # ----- Function header
+ code.putln("")
+ self.generate_function_header(code,
+ with_pymethdef = env.is_py_class_scope)
+ # ----- Local variable declarations
+ self.generate_argument_declarations(lenv, code)
+ code.put_var_declarations(lenv.var_entries)
+ init = ""
+ if not self.return_type.is_void:
+ code.putln(
+ "%s%s;" %
+ (self.return_type.declaration_code(
+ Naming.retval_cname),
+ init))
+ code.put_var_declarations(lenv.temp_entries)
+ self.generate_keyword_list(code)
+ # ----- Extern library function declarations
+ lenv.generate_library_function_declarations(code)
+ # ----- GIL acquisition
+ acquire_gil = self.need_gil_acquisition(lenv)
+ if acquire_gil:
+ lenv.global_scope().gil_used = 1
+ code.putln("PyGILState_STATE _save = PyGILState_Ensure();")
+ # ----- Fetch arguments
+ self.generate_argument_parsing_code(code)
+ self.generate_argument_increfs(lenv, code)
+ # ----- Initialise local variables
+ for entry in lenv.var_entries:
+ if entry.type.is_pyobject and entry.init_to_none and entry.used:
+ code.put_init_var_to_py_none(entry)
+ # ----- Check and convert arguments
+ self.generate_argument_conversion_code(code)
+ self.generate_argument_type_tests(code)
+ # ----- Function body
+ self.body.generate_execution_code(code)
+ # ----- Default return value
+ code.putln("")
+ if self.return_type.is_pyobject:
+ #if self.return_type.is_extension_type:
+ # lhs = "(PyObject *)%s" % Naming.retval_cname
+ #else:
+ lhs = Naming.retval_cname
+ code.put_init_to_py_none(lhs, self.return_type)
+ else:
+ val = self.return_type.default_value
+ if val:
+ code.putln("%s = %s;" % (Naming.retval_cname, val))
+ #code.putln("goto %s;" % code.return_label)
+ # ----- Error cleanup
+ if code.error_label in code.labels_used:
+ code.put_goto(code.return_label)
+ code.put_label(code.error_label)
+ code.put_var_xdecrefs(lenv.temp_entries)
+ default_retval = self.return_type.default_value
+ err_val = self.error_value()
+ exc_check = self.caller_will_check_exceptions()
+ if err_val or exc_check:
+ code.putln(
+ '__Pyx_AddTraceback("%s");' %
+ self.entry.qualified_name)
+ val = err_val or default_retval
+ if val:
+ code.putln(
+ "%s = %s;" % (
+ Naming.retval_cname,
+ val))
+ else:
+ code.use_utility_code(unraisable_exception_utility_code)
+ code.putln(
+ '__Pyx_WriteUnraisable("%s");' %
+ self.entry.qualified_name)
+ #if not self.return_type.is_void:
+ if default_retval:
+ code.putln(
+ "%s = %s;" % (
+ Naming.retval_cname,
+ default_retval))
+ #self.return_type.default_value))
+ # ----- Return cleanup
+ code.put_label(code.return_label)
+ code.put_var_decrefs(lenv.var_entries, used_only = 1)
+ #code.put_var_decrefs(lenv.arg_entries)
+ self.generate_argument_decrefs(lenv, code)
+ self.put_stararg_decrefs(code)
+ if acquire_gil:
+ code.putln("PyGILState_Release(_save);")
+ if not self.return_type.is_void:
+ code.putln("return %s;" % Naming.retval_cname)
+ code.putln("}")
+
+ def put_stararg_decrefs(self, code):
+ pass
+
+ def declare_argument(self, env, arg, readonly = 0):
+ if arg.type.is_void:
+ error(arg.pos, "Invalid use of 'void'")
+ elif not arg.type.is_complete() and not arg.type.is_array:
+ error(arg.pos,
+ "Argument type '%s' is incomplete" % arg.type)
+ return env.declare_arg(arg.name, arg.type, arg.pos,
+ readonly = readonly)
+
+ def generate_argument_increfs(self, env, code):
+ # Turn writable borrowed argument refs into owned refs.
+ # This is necessary because the argument will be decrefed
+ # if it is ever assigned to.
+ for entry in env.arg_entries:
+ if not entry.is_readonly:
+ code.put_var_incref(entry)
+
+ def generate_argument_decrefs(self, env, code):
+ for entry in env.arg_entries:
+ if not entry.is_readonly:
+ code.put_var_decref(entry)
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class CFuncDefNode(FuncDefNode):
+ # C function definition.
+ #
+ # visibility 'private' or 'public' or 'extern'
+ # base_type CBaseTypeNode
+ # declarator CDeclaratorNode
+ # body StatListNode
+ # api boolean
+ #
+ # with_gil boolean Acquire GIL around body
+ # type CFuncType
+
+ def unqualified_name(self):
+ return self.entry.name
+
+ def analyse_declarations(self, env):
+ base_type = self.base_type.analyse(env)
+ name_declarator, type = self.declarator.analyse(base_type, env)
+ if not type.is_cfunction:
+ error(self.pos,
+ "Suite attached to non-function declaration")
+ # Remember the actual type according to the function header
+ # written here, because the type in the symbol table entry
+ # may be different if we're overriding a C method inherited
+ # from the base type of an extension type.
+ self.type = type
+ name = name_declarator.name
+ cname = name_declarator.cname
+ self.entry = env.declare_cfunction(
+ name, type, self.pos,
+ cname = cname, visibility = self.visibility,
+ defining = self.body is not None,
+ api = self.api)
+ self.return_type = type.return_type
+
+ def declare_arguments(self, env):
+ type = self.type
+ without_gil = type.nogil and not type.with_gil
+ for arg in type.args:
+ if not arg.name:
+ error(arg.pos, "Missing argument name")
+ self.declare_argument(env, arg,
+ readonly = without_gil and arg.type.is_pyobject)
+
+ def need_gil_acquisition(self, lenv):
+ type = self.type
+ with_gil = type.with_gil
+ if type.nogil and not with_gil:
+# for arg in type.args:
+# if arg.type.is_pyobject:
+# error(self.pos,
+# "Function with Python argument cannot be declared nogil")
+ if type.return_type.is_pyobject:
+ error(self.pos,
+ "Function with Python return type cannot be declared nogil")
+ for entry in lenv.var_entries + lenv.temp_entries:
+ #print "CFuncDefNode.need_gil_acquisition:", entry.name, entry.cname, "readonly =", entry.is_readonly ###
+ if entry.type.is_pyobject and not entry.is_readonly:
+ error(self.pos, "Function declared nogil has Python locals or temporaries")
+ return with_gil
+
+ def generate_function_header(self, code, with_pymethdef):
+ arg_decls = []
+ type = self.type
+ visibility = self.entry.visibility
+ for arg in type.args:
+ arg_decls.append(arg.declaration_code())
+ if type.has_varargs:
+ arg_decls.append("...")
+ if not arg_decls:
+ arg_decls = ["void"]
+ entity = type.function_header_code(self.entry.func_cname,
+ string.join(arg_decls, ","))
+ if visibility == 'public':
+ dll_linkage = "DL_EXPORT"
+ else:
+ dll_linkage = None
+ header = self.return_type.declaration_code(entity,
+ dll_linkage = dll_linkage)
+ if visibility <> 'private':
+ storage_class = "%s " % Naming.extern_c_macro
+ else:
+ storage_class = "static "
+ code.putln("%s%s {" % (
+ storage_class,
+ header))
+
+ def generate_argument_declarations(self, env, code):
+ # Arguments already declared in function header
+ pass
+
+ def generate_keyword_list(self, code):
+ pass
+
+ def generate_argument_parsing_code(self, code):
+ pass
+
+ def generate_argument_conversion_code(self, code):
+ pass
+
+ def generate_argument_type_tests(self, code):
+ pass
+
+ def error_value(self):
+ if self.return_type.is_pyobject:
+ return "0"
+ else:
+ #return None
+ return self.entry.type.exception_value
+
+ def caller_will_check_exceptions(self):
+ return self.entry.type.exception_check
+
+
+class PyArgDeclNode(Node):
+ # Argument which must be a Python object (used
+ # for * and ** arguments).
+ #
+ # name string
+ # entry Symtab.Entry
+
+ pass
+
+
+class DefNode(FuncDefNode):
+ # A Python function definition.
+ #
+ # name string the Python name of the function
+ # args [CArgDeclNode] formal arguments
+ # star_arg PyArgDeclNode or None * argument
+ # starstar_arg PyArgDeclNode or None ** argument
+ # doc string or None
+ # body StatListNode
+ #
+ # The following subnode is constructed internally
+ # when the def statement is inside a Python class definition.
+ #
+ # assmt AssignmentNode Function construction/assignment
+
+ assmt = None
+ num_kwonly_args = 0
+ reqd_kw_flags_cname = "0"
+ has_star_or_kwonly_args = 0
+
+ def __init__(self, pos, **kwds):
+ FuncDefNode.__init__(self, pos, **kwds)
+ n = 0
+ for arg in self.args:
+ if arg.kw_only:
+ n += 1
+ self.num_kwonly_args = n
+ if self.star_arg or self.starstar_arg or n > 0:
+ self.has_star_or_kwonly_args = 1
+
+ def analyse_declarations(self, env):
+ for arg in self.args:
+ base_type = arg.base_type.analyse(env)
+ name_declarator, type = \
+ arg.declarator.analyse(base_type, env)
+ arg.name = name_declarator.name
+ if name_declarator.cname:
+ error(self.pos,
+ "Python function argument cannot have C name specification")
+ arg.type = type.as_argument_type()
+ arg.hdr_type = None
+ arg.needs_conversion = 0
+ arg.needs_type_test = 0
+ arg.is_generic = 1
+ if arg.allow_none is not None and not arg.type.is_extension_type:
+ error(self.pos,
+ "Only extension type arguments can have 'or None' or 'not None'")
+ self.declare_pyfunction(env)
+ self.analyse_signature(env)
+ self.return_type = self.entry.signature.return_type()
+# if self.has_star_or_kwonly_args:
+# env.use_utility_code(get_starargs_utility_code)
+
+ def analyse_signature(self, env):
+ any_type_tests_needed = 0
+ sig = self.entry.signature
+ nfixed = sig.num_fixed_args()
+ for i in range(nfixed):
+ if i < len(self.args):
+ arg = self.args[i]
+ arg.is_generic = 0
+ if sig.is_self_arg(i):
+ arg.is_self_arg = 1
+ arg.hdr_type = arg.type = env.parent_type
+ arg.needs_conversion = 0
+ else:
+ arg.hdr_type = sig.fixed_arg_type(i)
+ if not arg.type.same_as(arg.hdr_type):
+ if arg.hdr_type.is_pyobject and arg.type.is_pyobject:
+ arg.needs_type_test = 1
+ any_type_tests_needed = 1
+ else:
+ arg.needs_conversion = 1
+ if arg.needs_conversion:
+ arg.hdr_cname = Naming.arg_prefix + arg.name
+ else:
+ arg.hdr_cname = Naming.var_prefix + arg.name
+ else:
+ self.bad_signature()
+ return
+ if nfixed < len(self.args):
+ if not sig.has_generic_args:
+ self.bad_signature()
+ for arg in self.args:
+ if arg.is_generic and arg.type.is_extension_type:
+ arg.needs_type_test = 1
+ any_type_tests_needed = 1
+# if any_type_tests_needed:
+# env.use_utility_code(arg_type_test_utility_code)
+
+ def bad_signature(self):
+ sig = self.entry.signature
+ expected_str = "%d" % sig.num_fixed_args()
+ if sig.has_generic_args:
+ expected_str = expected_str + " or more"
+ name = self.name
+ if name.startswith("__") and name.endswith("__"):
+ desc = "Special method"
+ else:
+ desc = "Method"
+ error(self.pos,
+ "%s %s has wrong number of arguments "
+ "(%d declared, %s expected)" % (
+ desc, self.name, len(self.args), expected_str))
+
+ def declare_pyfunction(self, env):
+ #print "DefNode.declare_pyfunction:", self.name, "in", env ###
+ name = self.name
+ entry = env.declare_pyfunction(self.name, self.pos)
+ self.entry = entry
+ prefix = env.scope_prefix
+ entry.func_cname = \
+ Naming.func_prefix + prefix + name
+ entry.pymethdef_cname = \
+ Naming.pymethdef_prefix + prefix + name
+ if not entry.is_special:
+ entry.doc = self.doc
+ entry.doc_cname = \
+ Naming.funcdoc_prefix + prefix + name
+
+ def declare_arguments(self, env):
+ for arg in self.args:
+ if not arg.name:
+ error(arg.pos, "Missing argument name")
+ if arg.needs_conversion:
+ arg.entry = env.declare_var(arg.name, arg.type, arg.pos)
+ if arg.type.is_pyobject:
+ arg.entry.init = "0"
+ arg.entry.init_to_none = 0
+ else:
+ arg.entry = self.declare_argument(env, arg)
+ arg.entry.used = 1
+ arg.entry.is_self_arg = arg.is_self_arg
+ if arg.hdr_type:
+ if arg.is_self_arg or \
+ (arg.type.is_extension_type and not arg.hdr_type.is_extension_type):
+ arg.entry.is_declared_generic = 1
+ self.declare_python_arg(env, self.star_arg)
+ self.declare_python_arg(env, self.starstar_arg)
+
+ def declare_python_arg(self, env, arg):
+ if arg:
+ entry = env.declare_var(arg.name,
+ PyrexTypes.py_object_type, arg.pos)
+ entry.used = 1
+ entry.init = "0"
+ entry.init_to_none = 0
+ entry.xdecref_cleanup = 1
+ arg.entry = entry
+
+ def analyse_expressions(self, env):
+ self.analyse_default_values(env)
+ if env.is_py_class_scope:
+ self.synthesize_assignment_node(env)
+
+ def analyse_default_values(self, env):
+ for arg in self.args:
+ if arg.default:
+ if arg.is_generic:
+ arg.default.analyse_types(env)
+ arg.default = arg.default.coerce_to(arg.type, env)
+ arg.default.allocate_temps(env)
+ arg.default_entry = env.add_default_value(arg.type)
+ arg.default_entry.used = 1
+ else:
+ error(arg.pos,
+ "This argument cannot have a default value")
+ arg.default = None
+
+ def synthesize_assignment_node(self, env):
+ import ExprNodes
+ self.assmt = SingleAssignmentNode(self.pos,
+ lhs = ExprNodes.NameNode(self.pos, name = self.name),
+ rhs = ExprNodes.UnboundMethodNode(self.pos,
+ class_cname = env.class_obj_cname,
+ function = ExprNodes.PyCFunctionNode(self.pos,
+ pymethdef_cname = self.entry.pymethdef_cname)))
+ self.assmt.analyse_declarations(env)
+ self.assmt.analyse_expressions(env)
+
+ def generate_function_header(self, code, with_pymethdef):
+ arg_code_list = []
+ sig = self.entry.signature
+ if sig.has_dummy_arg:
+ arg_code_list.append(
+ "PyObject *%s" % Naming.self_cname)
+ for arg in self.args:
+ if not arg.is_generic:
+ if arg.is_self_arg:
+ arg_code_list.append("PyObject *%s" % arg.hdr_cname)
+ else:
+ arg_code_list.append(
+ arg.hdr_type.declaration_code(arg.hdr_cname))
+ if sig.has_generic_args:
+ arg_code_list.append(
+ "PyObject *%s, PyObject *%s"
+ % (Naming.args_cname, Naming.kwds_cname))
+ arg_code = ", ".join(arg_code_list)
+ dc = self.return_type.declaration_code(self.entry.func_cname)
+ header = "static %s(%s)" % (dc, arg_code)
+ code.putln("%s; /*proto*/" % header)
+ if self.entry.doc:
+ code.putln(
+ 'static char %s[] = "%s";' % (
+ self.entry.doc_cname,
+ self.entry.doc))
+ if with_pymethdef:
+ code.put(
+ "static PyMethodDef %s = " %
+ self.entry.pymethdef_cname)
+ code.put_pymethoddef(self.entry, ";")
+ code.putln("%s {" % header)
+
+ def generate_argument_declarations(self, env, code):
+ for arg in self.args:
+ if arg.is_generic: # or arg.needs_conversion:
+ code.put_var_declaration(arg.entry)
+
+ def generate_keyword_list(self, code):
+ if self.entry.signature.has_generic_args:
+ reqd_kw_flags = []
+ has_reqd_kwds = 0
+ code.put(
+ "static char *%s[] = {" %
+ Naming.kwdlist_cname)
+ for arg in self.args:
+ if arg.is_generic:
+ code.put(
+ '"%s",' %
+ arg.name)
+ if arg.kw_only and not arg.default:
+ has_reqd_kwds = 1
+ flag = "1"
+ else:
+ flag = "0"
+ reqd_kw_flags.append(flag)
+ code.putln(
+ "0};")
+ if has_reqd_kwds:
+ flags_name = Naming.reqd_kwds_cname
+ self.reqd_kw_flags_cname = flags_name
+ code.putln(
+ "static char %s[] = {%s};" % (
+ flags_name,
+ ",".join(reqd_kw_flags)))
+
+ def generate_argument_parsing_code(self, code):
+ # Generate PyArg_ParseTuple call for generic
+ # arguments, if any.
+ has_kwonly_args = self.num_kwonly_args > 0
+ has_star_or_kw_args = self.star_arg is not None \
+ or self.starstar_arg is not None or has_kwonly_args
+ if not self.entry.signature.has_generic_args:
+ if has_star_or_kw_args:
+ error(self.pos, "This method cannot have * or keyword arguments")
+ else:
+ arg_addrs = []
+ arg_formats = []
+ default_seen = 0
+ for arg in self.args:
+ arg_entry = arg.entry
+ if arg.is_generic:
+ if arg.default:
+ code.putln(
+ "%s = %s;" % (
+ arg_entry.cname,
+ arg.default_entry.cname))
+ if not default_seen:
+ arg_formats.append("|")
+ default_seen = 1
+ elif default_seen and not arg.kw_only:
+ error(arg.pos, "Non-default argument following default argument")
+ arg_addrs.append("&" + arg_entry.cname)
+ format = arg_entry.type.parsetuple_format
+ if format:
+ arg_formats.append(format)
+ else:
+ error(arg.pos,
+ "Cannot convert Python object argument to type '%s'"
+ % arg.type)
+ error_return_code = "return %s;" % self.error_value()
+ argformat = '"%s"' % string.join(arg_formats, "")
+ if has_star_or_kw_args:
+ self.generate_stararg_getting_code(code)
+ pt_arglist = [Naming.args_cname, Naming.kwds_cname, argformat,
+ Naming.kwdlist_cname] + arg_addrs
+ pt_argstring = string.join(pt_arglist, ", ")
+ code.put(
+ 'if (!PyArg_ParseTupleAndKeywords(%s)) ' %
+ pt_argstring)
+ if has_star_or_kw_args:
+ code.putln("{")
+ code.put_xdecref(Naming.args_cname, py_object_type)
+ code.put_xdecref(Naming.kwds_cname, py_object_type)
+ self.generate_arg_xdecref(self.star_arg, code)
+ self.generate_arg_xdecref(self.starstar_arg, code)
+ code.putln(error_return_code)
+ code.putln("}")
+ else:
+ code.putln(error_return_code)
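+ # Sketch of the generated parsing for "def f(self, x, y=2)"
+ # (variable and list names are placeholders, not the real cnames):
+ # if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O", kwdlist,
+ # &x, &y)) return <error value>;
+ # The "|" in the format string marks where the optional arguments
+ # begin, matching the default_seen logic above.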
+
+ def put_stararg_decrefs(self, code):
+ if self.has_star_or_kwonly_args:
+ code.put_xdecref(Naming.args_cname, py_object_type)
+ code.put_xdecref(Naming.kwds_cname, py_object_type)
+
+ def generate_arg_xdecref(self, arg, code):
+ if arg:
+ code.put_var_xdecref(arg.entry)
+
+ def arg_address(self, arg):
+ if arg:
+ return "&%s" % arg.entry.cname
+ else:
+ return "0"
+
+ def generate_stararg_getting_code(self, code):
+ num_kwonly = self.num_kwonly_args
+ nargs = len(self.args) - num_kwonly - self.entry.signature.num_fixed_args()
+ star_arg_addr = self.arg_address(self.star_arg)
+ starstar_arg_addr = self.arg_address(self.starstar_arg)
+ code.use_utility_code(get_starargs_utility_code)
+ code.putln(
+ "if (__Pyx_GetStarArgs(&%s, &%s, %s, %s, %s, %s, %s) < 0) return %s;" % (
+ Naming.args_cname,
+ Naming.kwds_cname,
+ Naming.kwdlist_cname,
+ nargs,
+ star_arg_addr,
+ starstar_arg_addr,
+ self.reqd_kw_flags_cname,
+ self.error_value()))
+
+ def generate_argument_conversion_code(self, code):
+ # Generate code to convert arguments from
+ # signature type to declared type, if needed.
+ for arg in self.args:
+ if arg.needs_conversion:
+ self.generate_arg_conversion(arg, code)
+
+ def generate_arg_conversion(self, arg, code):
+ # Generate conversion code for one argument.
+ old_type = arg.hdr_type
+ new_type = arg.type
+ if old_type.is_pyobject:
+ self.generate_arg_conversion_from_pyobject(arg, code)
+ elif new_type.is_pyobject:
+ self.generate_arg_conversion_to_pyobject(arg, code)
+ else:
+ if new_type.assignable_from(old_type):
+ code.putln(
+ "%s = %s;" % (arg.entry.cname, arg.hdr_cname))
+ else:
+ error(arg.pos,
+ "Cannot convert argument from '%s' to '%s'" %
+ (old_type, new_type))
+
+ def generate_arg_conversion_from_pyobject(self, arg, code):
+ new_type = arg.type
+ func = new_type.from_py_function
+ if func:
+ code.putln("%s = %s(%s); if (PyErr_Occurred()) %s" % (
+ arg.entry.cname,
+ func,
+ arg.hdr_cname,
+ code.error_goto(arg.pos)))
+ else:
+ error(arg.pos,
+ "Cannot convert Python object argument to type '%s'"
+ % new_type)
+
+ def generate_arg_conversion_to_pyobject(self, arg, code):
+ old_type = arg.hdr_type
+ func = old_type.to_py_function
+ if func:
+ code.putln("%s = %s(%s); if (!%s) %s" % (
+ arg.entry.cname,
+ func,
+ arg.hdr_cname,
+ arg.entry.cname,
+ code.error_goto(arg.pos)))
+ else:
+ error(arg.pos,
+ "Cannot convert argument of type '%s' to Python object"
+ % old_type)
+
+ def generate_argument_type_tests(self, code):
+ # Generate type tests for args whose signature
+ # type is PyObject * and whose declared type is
+ # a subtype thereof.
+ for arg in self.args:
+ if arg.needs_type_test:
+ self.generate_arg_type_test(arg, code)
+
+ def generate_arg_type_test(self, arg, code):
+ # Generate type test for one argument.
+ if arg.type.typeobj_is_available():
+ typeptr_cname = arg.type.typeptr_cname
+ arg_code = "((PyObject *)%s)" % arg.entry.cname
+ code.use_utility_code(arg_type_test_utility_code)
+ code.putln(
+ 'if (!__Pyx_ArgTypeTest(%s, %s, %d, "%s")) %s' % (
+ arg_code,
+ typeptr_cname,
+ #not arg.not_none,
+ arg.allow_none <> False,
+ arg.name,
+ code.error_goto(arg.pos)))
+ if arg.allow_none is None:
+ one_time_warning(arg.pos, 'or_none',
+ "'not None' will become the default in a future version of Pyrex. "
+ "Use 'or None' to allow passing None.")
+ else:
+ error(arg.pos, "Cannot test type of extern C class "
+ "without type object name specification")
+
+ def generate_execution_code(self, code):
+ # Evaluate and store argument default values
+ for arg in self.args:
+ default = arg.default
+ if default:
+ default.generate_evaluation_code(code)
+ default.make_owned_reference(code)
+ code.putln(
+ "%s = %s;" % (
+ arg.default_entry.cname,
+ default.result_as(arg.default_entry.type)))
+ default.generate_post_assignment_code(code)
+# if default.is_temp and default.type.is_pyobject:
+# code.putln(
+# "%s = 0;" %
+# default.result())
+ # For Python class methods, create and store function object
+ if self.assmt:
+ self.assmt.generate_execution_code(code)
+
+ def error_value(self):
+ return self.entry.signature.error_value
+
+ def caller_will_check_exceptions(self):
+ return 1
+
+
+class PyClassDefNode(StatNode, BlockNode):
+ # A Python class definition.
+ #
+ # name string Name of the class
+ # doc string or None
+ # body StatNode Attribute definition code
+ # entry Symtab.Entry
+ # scope PyClassScope
+ #
+ # The following subnodes are constructed internally:
+ #
+ # dict DictNode Class dictionary
+ # classobj ClassNode Class object
+ # target NameNode Variable to assign class object to
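+ #
+ # Roughly, for "class Spam: <body>": dict holds the class namespace,
+ # the body populates it, classobj builds the class object from
+ # (name, bases, dict, doc), and target assigns the result to the
+ # name Spam.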
+
+ def __init__(self, pos, name, bases, doc, body):
+ StatNode.__init__(self, pos)
+ self.name = name
+ self.doc = doc
+ self.body = body
+ import ExprNodes
+ self.dict = ExprNodes.DictNode(pos, key_value_pairs = [])
+ if self.doc:
+ doc_node = ExprNodes.StringNode(pos, value = self.doc)
+ else:
+ doc_node = None
+ self.classobj = ExprNodes.ClassNode(pos,
+ name = ExprNodes.StringNode(pos, value = name),
+ bases = bases, dict = self.dict, doc = doc_node)
+ self.target = ExprNodes.NameNode(pos, name = name)
+
+ def analyse_declarations(self, env):
+ self.target.analyse_target_declaration(env)
+
+ def analyse_expressions(self, env):
+ self.dict.analyse_expressions(env)
+ self.classobj.analyse_expressions(env)
+ genv = env.global_scope()
+ cenv = PyClassScope(name = self.name, outer_scope = genv)
+ cenv.class_dict_cname = self.dict.result()
+ cenv.class_obj_cname = self.classobj.result()
+ self.scope = cenv
+ self.body.analyse_declarations(cenv)
+ self.body.analyse_expressions(cenv)
+ self.target.analyse_target_expression(env, self.classobj)
+ self.dict.release_temp(env)
+ #self.classobj.release_temp(env)
+ #self.target.release_target_temp(env)
+
+ def generate_function_definitions(self, env, code):
+ #self.generate_py_string_decls(self.scope, code)
+ self.body.generate_function_definitions(
+ self.scope, code)
+
+ def generate_execution_code(self, code):
+ self.dict.generate_evaluation_code(code)
+ self.classobj.generate_evaluation_code(code)
+ self.body.generate_execution_code(code)
+ self.target.generate_assignment_code(self.classobj, code)
+ self.dict.generate_disposal_code(code)
+
+
+class CClassDefNode(StatNode):
+ # An extension type definition.
+ #
+ # visibility 'private' or 'public' or 'extern'
+ # typedef_flag boolean
+ # api boolean
+ # module_name string or None For import of extern type objects
+ # class_name string Unqualified name of class
+ # as_name string or None Name to declare as in this scope
+ # base_class_module string or None Module containing the base class
+ # base_class_name string or None Name of the base class
+ # options CClassOptions:
+ # objstruct_name string or None Specified C name of object struct
+ # typeobj_name string or None Specified C name of type object
+ # no_gc boolean Suppress GC support
+ # in_pxd boolean Is in a .pxd file
+ # doc string or None
+ # body StatNode or None
+ # entry Symtab.Entry
+ # base_type PyExtensionType or None
+
+ entry = None
+
+ def analyse_declarations(self, env):
+ #print "CClassDefNode.analyse_declarations:", self.class_name
+ #print "...visibility =", self.visibility
+ #print "...module_name =", self.module_name
+ if env.in_cinclude and not self.options.objstruct_cname:
+ error(self.pos, "Object struct name specification required for "
+ "C class defined in 'extern from' block")
+ self.base_type = None
+ has_body = self.body is not None
+ if self.base_class_name:
+ if self.base_class_module:
+ base_class_scope = env.find_module(self.base_class_module, self.pos)
+ else:
+ base_class_scope = env
+ if base_class_scope:
+ base_class_entry = base_class_scope.find(self.base_class_name, self.pos)
+ if base_class_entry:
+ if not base_class_entry.is_type:
+ error(self.pos, "'%s' is not a type name" % self.base_class_name)
+ elif not base_class_entry.type.is_extension_type:
+ error(self.pos, "'%s' is not an extension type" % self.base_class_name)
+ elif has_body and base_class_entry.visibility <> 'extern' and not base_class_entry.type.is_defined():
+ error(self.pos, "Base class '%s' is incomplete" % self.base_class_name)
+ else:
+ self.base_type = base_class_entry.type
+ if self.module_name and self.visibility <> 'extern':
+ module_path = self.module_name.split(".")
+ home_scope = env.find_imported_module(module_path, self.pos)
+ if not home_scope:
+ return
+ else:
+ home_scope = env
+ self.entry = home_scope.declare_c_class(
+ name = self.class_name,
+ pos = self.pos,
+ defining = has_body and self.in_pxd,
+ implementing = has_body and not self.in_pxd,
+ module_name = self.module_name,
+ base_type = self.base_type,
+ visibility = self.visibility,
+ typedef_flag = self.typedef_flag,
+ api = self.api,
+ options = self.options)
+ if home_scope is not env and self.visibility == 'extern':
+ env.add_imported_entry(self.class_name, self.entry, self.pos)
+ scope = self.entry.type.scope
+ if self.doc:
+ scope.doc = self.doc
+ if has_body:
+ self.body.analyse_declarations(scope)
+ if self.in_pxd:
+ scope.defined = 1
+ else:
+ scope.implemented = 1
+ env.allocate_vtable_names(self.entry)
+
+ def analyse_expressions(self, env):
+ if self.body:
+ self.body.analyse_expressions(env)
+
+ def generate_function_definitions(self, env, code):
+ if self.entry and self.body:
+# self.body.generate_function_definitions(
+# self.entry.type.scope, code)
+ self.body.generate_function_definitions(env, code)
+
+ def generate_execution_code(self, code):
+ # This is needed to generate evaluation code for
+ # default values of method arguments.
+ if self.body:
+ self.body.generate_execution_code(code)
+
+
+class PropertyNode(StatNode):
+ # Definition of a property in an extension type.
+ #
+ # name string
+ # doc string or None Doc string
+ # body StatListNode
+
+ def analyse_declarations(self, env):
+ #print "PropertyNode.analyse_declarations:", env ###
+ entry = env.declare_property(self.name, self.doc, self.pos)
+ if entry:
+ #if self.doc:
+ # doc_entry = env.get_string_const(self.doc)
+ # entry.doc_cname = doc_entry.cname
+ self.body.analyse_declarations(entry.scope)
+
+ def analyse_expressions(self, env):
+ self.body.analyse_expressions(env)
+
+ def generate_function_definitions(self, env, code):
+ self.body.generate_function_definitions(env, code)
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class GlobalNode(StatNode):
+ # Global variable declaration.
+ #
+ # names [string]
+
+ def analyse_declarations(self, env):
+ for name in self.names:
+ env.declare_global(name, self.pos)
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class ExprStatNode(StatNode):
+ # Expression used as a statement.
+ #
+ # expr ExprNode
+
+ def analyse_expressions(self, env):
+ self.expr.analyse_expressions(env)
+ self.expr.release_temp(env)
+
+ def generate_execution_code(self, code):
+ self.expr.generate_evaluation_code(code)
+ if not self.expr.is_temp and self.expr.result():
+ code.putln("%s;" % self.expr.result())
+ self.expr.generate_disposal_code(code)
+
+
+class AssignmentNode(StatNode):
+ # Abstract base class for assignment nodes.
+ #
+ # The analyse_expressions and generate_execution_code
+ # phases of assignments are split into two sub-phases
+ # each, to enable all the right hand sides of a
+ # parallel assignment to be evaluated before assigning
+ # to any of the left hand sides.
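+ #
+ # For example, "a, b = b, a" becomes a ParallelAssignmentNode holding
+ # two SingleAssignmentNodes; both right hand sides are evaluated into
+ # temporaries before either target is assigned, so the swap has the
+ # usual Python meaning.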
+
+ def analyse_expressions(self, env):
+ self.analyse_types(env)
+ self.allocate_rhs_temps(env)
+ self.allocate_lhs_temps(env)
+
+ def generate_execution_code(self, code):
+ self.generate_rhs_evaluation_code(code)
+ self.generate_assignment_code(code)
+
+
+class SingleAssignmentNode(AssignmentNode):
+ # The simplest case:
+ #
+ # a = b
+ #
+ # lhs ExprNode Left hand side
+ # rhs ExprNode Right hand side
+
+ def analyse_declarations(self, env):
+ self.lhs.analyse_target_declaration(env)
+
+ def analyse_types(self, env, use_temp = 0):
+ self.rhs.analyse_types(env)
+ self.lhs.analyse_target_types(env)
+ self.lhs.gil_assignment_check(env)
+ self.rhs = self.rhs.coerce_to(self.lhs.type, env)
+ if use_temp:
+ self.rhs = self.rhs.coerce_to_temp(env)
+
+ def allocate_rhs_temps(self, env):
+ self.rhs.allocate_temps(env)
+
+ def allocate_lhs_temps(self, env):
+ self.lhs.allocate_target_temps(env, self.rhs)
+
+ def generate_rhs_evaluation_code(self, code):
+ self.rhs.generate_evaluation_code(code)
+
+ def generate_assignment_code(self, code):
+ self.lhs.generate_assignment_code(self.rhs, code)
+
+
+class AugmentedAssignmentNode(SingleAssignmentNode):
+ # An in-place operation:
+ #
+ # a op= b
+ #
+ # lhs ExprNode Left hand side
+ # operator string
+ # rhs ExprNode Right hand side
+
+ def analyse_types(self, env):
+ op = self.operator
+ self.rhs.analyse_types(env)
+ self.lhs.analyse_inplace_types(env)
+ type = self.lhs.type
+ if type.is_pyobject:
+ type = py_object_type
+ else:
+ if type.is_ptr and (op == '+=' or op == '-='):
+ type = c_int_type
+ elif op == "**=":
+ error(self.pos, "**= operator not supported for non-Python types")
+ return
+ self.rhs = self.rhs.coerce_to(type, env)
+
+ def allocate_lhs_temps(self, env):
+ self.lhs.allocate_inplace_target_temps(env, self.rhs)
+
+ def generate_assignment_code(self, code):
+ self.lhs.generate_inplace_assignment_code(self.operator, self.rhs, code)
+
+
+class CascadedAssignmentNode(AssignmentNode):
+ # An assignment with multiple left hand sides:
+ #
+ # a = b = c
+ #
+ # lhs_list [ExprNode] Left hand sides
+ # rhs ExprNode Right hand side
+ #
+ # Used internally:
+ #
+ # coerced_rhs_list [ExprNode] RHS coerced to type of each LHS
+
+ def analyse_declarations(self, env):
+ for lhs in self.lhs_list:
+ lhs.analyse_target_declaration(env)
+
+ def analyse_types(self, env, use_temp = 0):
+ self.rhs.analyse_types(env)
+ if use_temp:
+ self.rhs = self.rhs.coerce_to_temp(env)
+ else:
+ self.rhs = self.rhs.coerce_to_simple(env)
+ from ExprNodes import CloneNode
+ self.coerced_rhs_list = []
+ for lhs in self.lhs_list:
+ lhs.analyse_target_types(env)
+ lhs.gil_assignment_check(env)
+ rhs = CloneNode(self.rhs)
+ rhs = rhs.coerce_to(lhs.type, env)
+ self.coerced_rhs_list.append(rhs)
+
+ def allocate_rhs_temps(self, env):
+ self.rhs.allocate_temps(env)
+
+ def allocate_lhs_temps(self, env):
+ for lhs, rhs in zip(self.lhs_list, self.coerced_rhs_list):
+ rhs.allocate_temps(env)
+ lhs.allocate_target_temps(env, rhs)
+ #lhs.release_target_temp(env)
+ #rhs.release_temp(env)
+ self.rhs.release_temp(env)
+
+ def generate_rhs_evaluation_code(self, code):
+ self.rhs.generate_evaluation_code(code)
+
+ def generate_assignment_code(self, code):
+ for i in range(len(self.lhs_list)):
+ lhs = self.lhs_list[i]
+ rhs = self.coerced_rhs_list[i]
+ rhs.generate_evaluation_code(code)
+ lhs.generate_assignment_code(rhs, code)
+ # Assignment has disposed of the cloned RHS
+ self.rhs.generate_disposal_code(code)
+
+
+class ParallelAssignmentNode(AssignmentNode):
+ # A combined packing/unpacking assignment:
+ #
+ # a, b, c = d, e, f
+ #
+ # This has been rearranged by the parser into
+ #
+ # a = d ; b = e ; c = f
+ #
+ # but we must evaluate all the right hand sides
+ # before assigning to any of the left hand sides.
+ #
+ # stats [AssignmentNode] The constituent assignments
+
+ def analyse_declarations(self, env):
+ for stat in self.stats:
+ stat.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ for stat in self.stats:
+ stat.analyse_types(env, use_temp = 1)
+ stat.allocate_rhs_temps(env)
+ for stat in self.stats:
+ stat.allocate_lhs_temps(env)
+
+ def generate_execution_code(self, code):
+ for stat in self.stats:
+ stat.generate_rhs_evaluation_code(code)
+ for stat in self.stats:
+ stat.generate_assignment_code(code)
+
+
+class PrintStatNode(StatNode):
+ # print statement
+ #
+ # args [ExprNode]
+ # ends_with_comma boolean
+
+ def analyse_expressions(self, env):
+ for i in range(len(self.args)):
+ arg = self.args[i]
+ arg.analyse_types(env)
+ arg = arg.coerce_to_pyobject(env)
+ arg.allocate_temps(env)
+ arg.release_temp(env)
+ self.args[i] = arg
+# env.use_utility_code(printing_utility_code)
+ self.gil_check(env)
+
+ gil_message = "Python print statement"
+
+ def generate_execution_code(self, code):
+ for arg in self.args:
+ arg.generate_evaluation_code(code)
+ code.use_utility_code(printing_utility_code)
+ code.putln(
+ "if (__Pyx_PrintItem(%s) < 0) %s" % (
+ arg.py_result(),
+ code.error_goto(self.pos)))
+ arg.generate_disposal_code(code)
+ if not self.ends_with_comma:
+ code.use_utility_code(printing_utility_code)
+ code.putln(
+ "if (__Pyx_PrintNewline() < 0) %s" %
+ code.error_goto(self.pos))
+
+
+class DelStatNode(StatNode):
+ # del statement
+ #
+ # args [ExprNode]
+
+ def analyse_declarations(self, env):
+ for arg in self.args:
+ arg.analyse_target_declaration(env)
+
+ def analyse_expressions(self, env):
+ for arg in self.args:
+ arg.analyse_target_expression(env, None)
+ type = arg.type
+ if not (type.is_pyobject
+ or (type.is_ptr and type.base_type.is_struct_or_union
+ and type.base_type.scope.is_cplus)):
+ error(arg.pos, "'del' can only be applied to Python object or pointer to C++ type")
+ if type.is_pyobject:
+ self.gil_check(env)
+
+ gil_message = "Deleting Python object"
+
+ def generate_execution_code(self, code):
+ for arg in self.args:
+ if arg.type.is_pyobject:
+ arg.generate_deletion_code(code)
+ else:
+ arg.generate_evaluation_code(code)
+ code.putln("delete %s;" % arg.result())
+ arg.generate_disposal_code(code)
+
+
+class PassStatNode(StatNode):
+ # pass statement
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class BreakStatNode(StatNode):
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ if not code.break_label:
+ error(self.pos, "break statement not inside loop")
+ else:
+ #code.putln(
+ # "goto %s;" %
+ # code.break_label)
+ code.put_goto(code.break_label)
+
+
+class ContinueStatNode(StatNode):
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ if code.in_try_finally:
+ error(self.pos, "continue statement inside try of try...finally")
+ elif not code.continue_label:
+ error(self.pos, "continue statement not inside loop")
+ else:
+ #code.putln(
+ # "goto %s;" %
+ # code.continue_label)
+ code.put_goto(code.continue_label)
+
+
+class ReturnStatNode(StatNode):
+ # return statement
+ #
+ # value ExprNode or None
+ # return_type PyrexType
+ # temps_in_use [Entry] Temps in use at time of return
+
+ def analyse_expressions(self, env):
+ return_type = env.return_type
+ self.return_type = return_type
+ self.temps_in_use = env.temps_in_use()
+ if not return_type:
+ error(self.pos, "Return not inside a function body")
+ return
+ if self.value:
+ self.value.analyse_types(env)
+ if return_type.is_void or return_type.is_returncode:
+ error(self.value.pos,
+ "Return with value in void function")
+ else:
+ self.value = self.value.coerce_to(env.return_type, env)
+ self.value.allocate_temps(env)
+ self.value.release_temp(env)
+ else:
+ if (not return_type.is_void
+ and not return_type.is_pyobject
+ and not return_type.is_returncode):
+ error(self.pos, "Return value required")
+ if return_type.is_pyobject:
+ self.gil_check(env)
+
+ gil_message = "Returning Python object"
+
+ def generate_execution_code(self, code):
+ if not self.return_type:
+ # error reported earlier
+ return
+ if self.value:
+ self.value.generate_evaluation_code(code)
+ self.value.make_owned_reference(code)
+ code.putln(
+ "%s = %s;" % (
+ Naming.retval_cname,
+ self.value.result_as(self.return_type)))
+ self.value.generate_post_assignment_code(code)
+ else:
+ if self.return_type.is_pyobject:
+ code.put_init_to_py_none(Naming.retval_cname, self.return_type)
+ elif self.return_type.is_returncode:
+ code.putln(
+ "%s = %s;" % (
+ Naming.retval_cname,
+ self.return_type.default_value))
+ for entry in self.temps_in_use:
+ code.put_var_decref_clear(entry)
+ #code.putln(
+ # "goto %s;" %
+ # code.return_label)
+ code.put_goto(code.return_label)
+
+
+class RaiseStatNode(StatNode):
+ # raise statement
+ #
+ # exc_type ExprNode or None
+ # exc_value ExprNode or None
+ # exc_tb ExprNode or None
+
+ def analyse_expressions(self, env):
+ if self.exc_type:
+ self.exc_type.analyse_types(env)
+ self.exc_type = self.exc_type.coerce_to_pyobject(env)
+ self.exc_type.allocate_temps(env)
+ if self.exc_value:
+ self.exc_value.analyse_types(env)
+ self.exc_value = self.exc_value.coerce_to_pyobject(env)
+ self.exc_value.allocate_temps(env)
+ if self.exc_tb:
+ self.exc_tb.analyse_types(env)
+ self.exc_tb = self.exc_tb.coerce_to_pyobject(env)
+ self.exc_tb.allocate_temps(env)
+ if self.exc_type:
+ self.exc_type.release_temp(env)
+ if self.exc_value:
+ self.exc_value.release_temp(env)
+ if self.exc_tb:
+ self.exc_tb.release_temp(env)
+ self.gil_check(env)
+
+ gil_message = "Raising exception"
+
+ def generate_execution_code(self, code):
+ if self.exc_type:
+ self.exc_type.generate_evaluation_code(code)
+ type_code = self.exc_type.py_result()
+ else:
+ type_code = "0"
+ if self.exc_value:
+ self.exc_value.generate_evaluation_code(code)
+ value_code = self.exc_value.py_result()
+ else:
+ value_code = "0"
+ if self.exc_tb:
+ self.exc_tb.generate_evaluation_code(code)
+ tb_code = self.exc_tb.py_result()
+ else:
+ tb_code = "0"
+ code.use_utility_code(raise_utility_code)
+ code.putln(
+ "__Pyx_Raise(%s, %s, %s);" % (
+ type_code,
+ value_code,
+ tb_code))
+ if self.exc_type:
+ self.exc_type.generate_disposal_code(code)
+ if self.exc_value:
+ self.exc_value.generate_disposal_code(code)
+ if self.exc_tb:
+ self.exc_tb.generate_disposal_code(code)
+ code.putln(
+ code.error_goto(self.pos))
+
+
+class ReraiseStatNode(StatNode):
+
+ def analyse_expressions(self, env):
+ env.reraise_used = 1
+ self.gil_check(env)
+
+ gil_message = "Raising exception"
+
+ def generate_execution_code(self, code):
+ vars = code.exc_vars
+ if vars:
+ tvars = tuple(vars)
+ code.putln("PyErr_Restore(%s, %s, %s);" % tvars)
+ code.putln("%s = %s = %s = 0;" % tvars)
+ code.putln(code.error_goto(self.pos))
+ else:
+ error(self.pos, "Reraise not inside except clause")
+
+
+class AssertStatNode(StatNode):
+ # assert statement
+ #
+ # cond ExprNode
+ # value ExprNode or None
+
+ def analyse_expressions(self, env):
+ self.cond = self.cond.analyse_boolean_expression(env)
+ if self.value:
+ self.value.analyse_types(env)
+ self.value = self.value.coerce_to_pyobject(env)
+ self.value.allocate_temps(env)
+ self.cond.release_temp(env)
+ if self.value:
+ self.value.release_temp(env)
+ self.gil_check(env)
+
+ gil_message = "Raising exception"
+
+ def generate_execution_code(self, code):
+ code.putln("#ifndef PYREX_WITHOUT_ASSERTIONS")
+ self.cond.generate_evaluation_code(code)
+ code.putln(
+ "if (!%s) {" %
+ self.cond.result())
+ if self.value:
+ self.value.generate_evaluation_code(code)
+ if self.value:
+ code.putln(
+ "PyErr_SetObject(PyExc_AssertionError, %s);" %
+ self.value.py_result())
+ else:
+ code.putln(
+ "PyErr_SetNone(PyExc_AssertionError);")
+ code.putln(
+ code.error_goto(self.pos))
+ code.putln(
+ "}")
+ self.cond.generate_disposal_code(code)
+ # Disposal code for value not needed because exception always raised
+ #if self.value:
+ # self.value.generate_disposal_code(code)
+ code.putln("#endif")
+
+
+class IfStatNode(StatNode):
+ # if statement
+ #
+ # if_clauses [IfClauseNode]
+ # else_clause StatNode or None
+
+ def analyse_declarations(self, env):
+ for if_clause in self.if_clauses:
+ if_clause.analyse_declarations(env)
+ if self.else_clause:
+ self.else_clause.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ for if_clause in self.if_clauses:
+ if_clause.analyse_expressions(env)
+ if self.else_clause:
+ self.else_clause.analyse_expressions(env)
+
+ def generate_execution_code(self, code):
+ end_label = code.new_label()
+ for if_clause in self.if_clauses:
+ if_clause.generate_execution_code(code, end_label)
+ if self.else_clause:
+ code.putln("/*else*/ {")
+ self.else_clause.generate_execution_code(code)
+ code.putln("}")
+ code.put_label(end_label)
+
+
+class IfClauseNode(Node):
+ # if or elif clause in an if statement
+ #
+ # condition ExprNode
+ # body StatNode
+
+ def analyse_declarations(self, env):
+ self.condition.analyse_declarations(env)
+ self.body.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ self.condition = \
+ self.condition.analyse_temp_boolean_expression(env)
+ self.condition.release_temp(env)
+ self.body.analyse_expressions(env)
+
+ def generate_execution_code(self, code, end_label):
+ self.condition.generate_evaluation_code(code)
+ code.putln(
+ "if (%s) {" %
+ self.condition.result())
+ self.body.generate_execution_code(code)
+ #code.putln(
+ # "goto %s;" %
+ # end_label)
+ code.put_goto(end_label)
+ code.putln("}")
+
+
+class WhileStatNode(StatNode):
+ # while statement
+ #
+ # condition ExprNode
+ # body StatNode
+ # else_clause StatNode
+
+ def analyse_declarations(self, env):
+ self.body.analyse_declarations(env)
+ if self.else_clause:
+ self.else_clause.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ self.condition = \
+ self.condition.analyse_temp_boolean_expression(env)
+ self.condition.release_temp(env)
+ #env.recycle_pending_temps() # TEMPORARY
+ self.body.analyse_expressions(env)
+ if self.else_clause:
+ self.else_clause.analyse_expressions(env)
+
+ def generate_execution_code(self, code):
+ old_loop_labels = code.new_loop_labels()
+ code.putln(
+ "while (1) {")
+ self.condition.generate_evaluation_code(code)
+ code.putln(
+ "if (!%s) break;" %
+ self.condition.result())
+ self.body.generate_execution_code(code)
+ code.put_label(code.continue_label)
+ code.putln("}")
+ break_label = code.break_label
+ code.set_loop_labels(old_loop_labels)
+ if self.else_clause:
+ code.putln("/*else*/ {")
+ self.else_clause.generate_execution_code(code)
+ code.putln("}")
+ code.put_label(break_label)
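+ # The generated code is roughly:
+ # while (1) { if (!cond) break; <body> <continue label>: ; }
+ # <else clause, if any> <break label>:
+ # so a false condition falls through into the else clause, while a
+ # Pyrex 'break' jumps straight to the break label past it.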
+
+
+class ForInStatNode(StatNode):
+ # for statement
+ #
+ # target ExprNode
+ # iterator IteratorNode
+ # body StatNode
+ # else_clause StatNode
+ # item NextNode used internally
+
+ def analyse_declarations(self, env):
+ self.target.analyse_target_declaration(env)
+ self.body.analyse_declarations(env)
+ if self.else_clause:
+ self.else_clause.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ import ExprNodes
+ self.iterator.analyse_expressions(env)
+ self.target.analyse_target_types(env)
+ self.item = ExprNodes.NextNode(self.iterator, env)
+ self.item = self.item.coerce_to(self.target.type, env)
+ self.item.allocate_temps(env)
+ self.target.allocate_target_temps(env, self.item)
+ #self.item.release_temp(env)
+ #self.target.release_target_temp(env)
+ self.body.analyse_expressions(env)
+ if self.else_clause:
+ self.else_clause.analyse_expressions(env)
+ self.iterator.release_temp(env)
+
+ def generate_execution_code(self, code):
+ old_loop_labels = code.new_loop_labels()
+ self.iterator.generate_evaluation_code(code)
+ code.putln(
+ "for (;;) {")
+ self.item.generate_evaluation_code(code)
+ self.target.generate_assignment_code(self.item, code)
+ self.body.generate_execution_code(code)
+ code.put_label(code.continue_label)
+ code.putln(
+ "}")
+ break_label = code.break_label
+ code.set_loop_labels(old_loop_labels)
+ if self.else_clause:
+ code.putln("/*else*/ {")
+ self.else_clause.generate_execution_code(code)
+ code.putln("}")
+ code.put_label(break_label)
+ self.iterator.generate_disposal_code(code)
+
+
+class IntegerForStatNode(StatNode):
+ # for expr rel name rel expr
+ #
+ # bound1 ExprNode
+ # relation1 string
+ # target NameNode
+ # relation2 string
+ # bound2 ExprNode
+ # body StatNode
+ # else_clause StatNode or None
+ #
+ # Used internally:
+ #
+ # is_py_target bool
+ # loopvar_name string
+ # py_loopvar_node PyTempNode or None
+
+ def analyse_declarations(self, env):
+ self.target.analyse_target_declaration(env)
+ self.body.analyse_declarations(env)
+ if self.else_clause:
+ self.else_clause.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ import ExprNodes
+ self.target.analyse_target_types(env)
+ self.bound1.analyse_types(env)
+ self.bound2.analyse_types(env)
+ self.bound1 = self.bound1.coerce_to_integer(env)
+ self.bound2 = self.bound2.coerce_to_integer(env)
+ if not (self.bound2.is_name or self.bound2.is_literal):
+ self.bound2 = self.bound2.coerce_to_temp(env)
+ target_type = self.target.type
+ if not (target_type.is_pyobject or target_type.is_int):
+ error(self.target.pos,
+ "Integer for-loop variable must be of type int or Python object")
+ #if not (target_type.is_pyobject
+ # or target_type.assignable_from(PyrexTypes.c_int_type)):
+ # error(self.target.pos,
+ # "Cannot assign integer to variable of type '%s'" % target_type)
+ if target_type.is_int:
+ self.is_py_target = 0
+ self.loopvar_name = self.target.entry.cname
+ self.py_loopvar_node = None
+ else:
+ self.is_py_target = 1
+ c_loopvar_node = ExprNodes.TempNode(self.pos,
+ PyrexTypes.c_long_type, env)
+ c_loopvar_node.allocate_temps(env)
+ self.loopvar_name = c_loopvar_node.result()
+ self.py_loopvar_node = \
+ ExprNodes.CloneNode(c_loopvar_node).coerce_to_pyobject(env)
+ self.bound1.allocate_temps(env)
+ self.bound2.allocate_temps(env)
+ if self.is_py_target:
+ self.py_loopvar_node.allocate_temps(env)
+ self.target.allocate_target_temps(env, self.py_loopvar_node)
+ #self.target.release_target_temp(env)
+ #self.py_loopvar_node.release_temp(env)
+ self.body.analyse_expressions(env)
+ if self.is_py_target:
+ c_loopvar_node.release_temp(env)
+ if self.else_clause:
+ self.else_clause.analyse_expressions(env)
+ self.bound1.release_temp(env)
+ self.bound2.release_temp(env)
+
+ def generate_execution_code(self, code):
+ old_loop_labels = code.new_loop_labels()
+ self.bound1.generate_evaluation_code(code)
+ self.bound2.generate_evaluation_code(code)
+ offset, incop = self.relation_table[self.relation1]
+ code.putln(
+ "for (%s = %s%s; %s %s %s; %s%s) {" % (
+ self.loopvar_name,
+ self.bound1.result(), offset,
+ self.loopvar_name, self.relation2, self.bound2.result(),
+ incop, self.loopvar_name))
+ if self.py_loopvar_node:
+ self.py_loopvar_node.generate_evaluation_code(code)
+ self.target.generate_assignment_code(self.py_loopvar_node, code)
+ self.body.generate_execution_code(code)
+ code.put_label(code.continue_label)
+ code.putln("}")
+ break_label = code.break_label
+ code.set_loop_labels(old_loop_labels)
+ if self.else_clause:
+ code.putln("/*else*/ {")
+ self.else_clause.generate_execution_code(code)
+ code.putln("}")
+ code.put_label(break_label)
+ self.bound1.generate_disposal_code(code)
+ self.bound2.generate_disposal_code(code)
+
+ relation_table = {
+ # {relop : (initial offset, increment op)}
+ '<=': ("", "++"),
+ '<' : ("+1", "++"),
+ '>=': ("", "--"),
+ '>' : ("-1", "--")
+ }
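+
+ # For example, "for i from 0 <= i < n:" generates roughly
+ # for (i = 0; i < n; ++i) { ... }
+ # and "for i from n > i >= 0:" generates roughly
+ # for (i = n-1; i >= 0; --i) { ... }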
+
+
+class TryExceptStatNode(StatNode):
+ # try .. except statement
+ #
+ # body StatNode
+ # except_clauses [ExceptClauseNode]
+ # else_clause StatNode or None
+ # cleanup_list [Entry] temps to clean up on error
+
+ def analyse_declarations(self, env):
+ self.body.analyse_declarations(env)
+ for except_clause in self.except_clauses:
+ except_clause.analyse_declarations(env)
+ if self.else_clause:
+ self.else_clause.analyse_declarations(env)
+ self.gil_check(env)
+
+ def analyse_expressions(self, env):
+ self.body.analyse_expressions(env)
+ self.cleanup_list = env.free_temp_entries[:]
+ for except_clause in self.except_clauses:
+ except_clause.analyse_expressions(env)
+ if self.else_clause:
+ self.else_clause.analyse_expressions(env)
+ self.gil_check(env)
+
+ gil_message = "Try-except statement"
+
+ def generate_execution_code(self, code):
+ old_error_label = code.new_error_label()
+ our_error_label = code.error_label
+ end_label = code.new_label()
+ code.putln(
+ "/*try:*/ {")
+ self.body.generate_execution_code(code)
+ code.putln(
+ "}")
+ code.error_label = old_error_label
+ if self.else_clause:
+ code.putln(
+ "/*else:*/ {")
+ self.else_clause.generate_execution_code(code)
+ code.putln(
+ "}")
+ code.put_goto(end_label)
+ code.put_label(our_error_label)
+ code.put_var_xdecrefs_clear(self.cleanup_list)
+ default_clause_seen = 0
+ for except_clause in self.except_clauses:
+ if not except_clause.pattern:
+ default_clause_seen = 1
+ else:
+ if default_clause_seen:
+ error(except_clause.pos, "Default except clause not last")
+ except_clause.generate_handling_code(code, end_label)
+ if not default_clause_seen:
+ code.put_goto(code.error_label)
+ code.put_label(end_label)
+
+
+class ExceptClauseNode(Node):
+ # Part of try ... except statement.
+ #
+ # pattern ExprNode
+ # exc_target ExprNode or None
+ # tb_target ExprNode or None
+ # body StatNode
+ # match_flag string result of exception match
+ # exc_value ExcValueNode used internally
+ # tb_value ExcValueNode used internally
+ # function_name string qualified name of enclosing function
+ # exc_vars (string * 3) local exception variables
+ # reraise_used boolean body contains reraise statement
+
+ def analyse_declarations(self, env):
+ if self.exc_target:
+ self.exc_target.analyse_target_declaration(env)
+ if self.tb_target:
+ self.tb_target.analyse_target_declaration(env)
+ self.body.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ genv = env.global_scope()
+ self.function_name = env.qualified_name
+ if self.pattern:
+ self.pattern.analyse_expressions(env)
+ self.pattern = self.pattern.coerce_to_pyobject(env)
+ self.match_flag = env.allocate_temp(PyrexTypes.c_int_type)
+ self.pattern.release_temp(env)
+ env.release_temp(self.match_flag)
+ self.exc_vars = [env.allocate_temp(py_object_type) for i in xrange(3)]
+ self.exc_value = self.analyse_target(env, self.exc_target, 1)
+ self.tb_value = self.analyse_target(env, self.tb_target, 2)
+ old_reraise_used = env.reraise_used
+ env.reraise_used = False
+ self.body.analyse_expressions(env)
+ self.reraise_used = env.reraise_used
+ env.reraise_used = old_reraise_used
+ for var in self.exc_vars:
+ env.release_temp(var)
+
+ def analyse_target(self, env, target, var_no):
+ if target:
+ import ExprNodes
+ value = ExprNodes.ExcValueNode(self.pos, env, self.exc_vars[var_no])
+ value.allocate_temps(env)
+ target.analyse_target_expression(env, value)
+ return value
+
+ def generate_handling_code(self, code, end_label):
+ code.mark_pos(self.pos)
+ if self.pattern:
+ self.pattern.generate_evaluation_code(code)
+ code.putln(
+ "%s = PyErr_ExceptionMatches(%s);" % (
+ self.match_flag,
+ self.pattern.py_result()))
+ self.pattern.generate_disposal_code(code)
+ code.putln(
+ "if (%s) {" %
+ self.match_flag)
+ else:
+ code.putln(
+ "/*except:*/ {")
+ any_bindings = self.exc_target or self.tb_target
+ exc_vars_used = any_bindings or self.reraise_used
+ if exc_vars_used:
+ if any_bindings:
+ code.putln(
+ '%s; __Pyx_AddTraceback("%s");' % (
+ code.error_setup(self.pos),
+ self.function_name))
+ exc_args = "&%s, &%s, &%s" % tuple(self.exc_vars)
+ code.putln("PyErr_Fetch(%s);" % exc_args)
+ if any_bindings:
+ code.use_utility_code(normalize_exception_utility_code)
+ code.putln("if (__Pyx_NormalizeException(%s) < 0) %s" % (exc_args,
+ code.error_goto(self.pos)))
+ if self.exc_target:
+ self.exc_value.generate_evaluation_code(code)
+ self.exc_target.generate_assignment_code(self.exc_value, code)
+ if self.tb_target:
+ self.tb_value.generate_evaluation_code(code)
+ self.tb_target.generate_assignment_code(self.tb_value, code)
+ old_exc_vars = code.exc_vars
+ code.exc_vars = self.exc_vars
+ self.body.generate_execution_code(code)
+ code.exc_vars = old_exc_vars
+ if exc_vars_used:
+ for var in self.exc_vars:
+ code.putln("Py_XDECREF(%s); %s = 0;" % (var, var))
+ code.put_goto(end_label)
+ code.putln(
+ "}")
+
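generate_handling_code above boils down to a fairly small C pattern. For a handler like "except ValueError, e:" the emitted code has roughly the shape below (an illustrative skeleton with made-up temp and label names, written as a string in the same style as the utility fragments later in this file):

    except_clause_sketch = r"""
    __pyx_1 = PyErr_ExceptionMatches(PyExc_ValueError);
    if (__pyx_1) {
        /* record the position, then grab and normalize the live exception */
        __Pyx_AddTraceback("module.function");
        PyErr_Fetch(&__pyx_2, &__pyx_3, &__pyx_4);
        if (__Pyx_NormalizeException(&__pyx_2, &__pyx_3, &__pyx_4) < 0) goto error;
        /* bind 'e' from the normalized value, then run the handler body */
        Py_XDECREF(__pyx_2); __pyx_2 = 0;
        Py_XDECREF(__pyx_3); __pyx_3 = 0;
        Py_XDECREF(__pyx_4); __pyx_4 = 0;
        goto end;
    }
    """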
+
+class TryFinallyStatNode(StatNode):
+ # try ... finally statement
+ #
+ # body StatNode
+ # finally_clause StatNode
+ #
+ # cleanup_list [Entry] temps to clean up on error
+ #
+    # The plan is that we funnel all continue, break,
+ # return and error gotos into the beginning of the
+ # finally block, setting a variable to remember which
+ # one we're doing. At the end of the finally block, we
+ # switch on the variable to figure out where to go.
+ # In addition, if we're doing an error, we save the
+ # exception on entry to the finally block and restore
+ # it on exit.
+
+ preserve_exception = 1
+
+ disallow_continue_in_try_finally = 0
+ # There doesn't seem to be any point in disallowing
+ # continue in the try block, since we have no problem
+ # handling it.
+
+ def analyse_declarations(self, env):
+ self.body.analyse_declarations(env)
+ self.finally_clause.analyse_declarations(env)
+
+ def analyse_expressions(self, env):
+ self.body.analyse_expressions(env)
+ self.cleanup_list = env.free_temp_entries[:]
+ self.finally_clause.analyse_expressions(env)
+ self.gil_check(env)
+
+ gil_message = "Try-finally statement"
+
+ def generate_execution_code(self, code):
+ old_error_label = code.error_label
+ old_labels = code.all_new_labels()
+ new_labels = code.get_all_labels()
+ new_error_label = code.error_label
+ catch_label = code.new_label()
+ code.putln(
+ "/*try:*/ {")
+ if self.disallow_continue_in_try_finally:
+ was_in_try_finally = code.in_try_finally
+ code.in_try_finally = 1
+ self.body.generate_execution_code(code)
+ if self.disallow_continue_in_try_finally:
+ code.in_try_finally = was_in_try_finally
+ code.putln(
+ "}")
+ code.putln(
+ "/*finally:*/ {")
+ cases_used = []
+ error_label_used = 0
+ for i, new_label in enumerate(new_labels):
+ if new_label in code.labels_used:
+ cases_used.append(i)
+ if new_label == new_error_label:
+ error_label_used = 1
+ error_label_case = i
+ if cases_used:
+ code.putln(
+ "int __pyx_why;")
+ if error_label_used and self.preserve_exception:
+ code.putln(
+ "PyObject *%s, *%s, *%s;" % Naming.exc_vars)
+ code.putln(
+ "int %s;" % Naming.exc_lineno_name)
+ code.use_label(catch_label)
+ code.putln(
+ "__pyx_why = 0; goto %s;" % catch_label)
+ for i in cases_used:
+ new_label = new_labels[i]
+ #if new_label and new_label <> "<try>":
+ if new_label == new_error_label and self.preserve_exception:
+ self.put_error_catcher(code,
+ new_error_label, i+1, catch_label)
+ else:
+ code.putln(
+ "%s: __pyx_why = %s; goto %s;" % (
+ new_label,
+ i+1,
+ catch_label))
+ code.put_label(catch_label)
+ code.set_all_labels(old_labels)
+ if error_label_used:
+ code.new_error_label()
+ finally_error_label = code.error_label
+ self.finally_clause.generate_execution_code(code)
+ if error_label_used:
+ if finally_error_label in code.labels_used and self.preserve_exception:
+ over_label = code.new_label()
+                code.put_goto(over_label)
+ code.put_label(finally_error_label)
+ code.putln("if (__pyx_why == %d) {" % (error_label_case + 1))
+ for var in Naming.exc_vars:
+ code.putln("Py_XDECREF(%s);" % var)
+ code.putln("}")
+ code.put_goto(old_error_label)
+ code.put_label(over_label)
+ code.error_label = old_error_label
+ if cases_used:
+ code.putln(
+ "switch (__pyx_why) {")
+ for i in cases_used:
+ old_label = old_labels[i]
+ if old_label == old_error_label and self.preserve_exception:
+ self.put_error_uncatcher(code, i+1, old_error_label)
+ else:
+ code.use_label(old_label)
+ code.putln(
+ "case %s: goto %s;" % (
+ i+1,
+ old_label))
+ code.putln(
+ "}")
+ code.putln(
+ "}")
+
+ def put_error_catcher(self, code, error_label, i, catch_label):
+ code.putln(
+ "%s: {" %
+ error_label)
+ code.putln(
+ "__pyx_why = %s;" %
+ i)
+ code.put_var_xdecrefs_clear(self.cleanup_list)
+ code.putln(
+ "PyErr_Fetch(&%s, &%s, &%s);" %
+ Naming.exc_vars)
+ code.putln(
+ "%s = %s;" % (
+ Naming.exc_lineno_name, Naming.lineno_cname))
+ #code.putln(
+ # "goto %s;" %
+ # catch_label)
+ code.put_goto(catch_label)
+ code.putln(
+ "}")
+
+ def put_error_uncatcher(self, code, i, error_label):
+ code.putln(
+ "case %s: {" %
+ i)
+ code.putln(
+ "PyErr_Restore(%s, %s, %s);" %
+ Naming.exc_vars)
+ code.putln(
+ "%s = %s;" % (
+ Naming.lineno_cname, Naming.exc_lineno_name))
+ for var in Naming.exc_vars:
+ code.putln(
+ "%s = 0;" %
+ var)
+ code.put_goto(error_label)
+ code.putln(
+ "}")
+
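The "funnel" described in the class comment is easiest to see in the generated C. For a try/finally whose body can exit normally, via "break", or via an error, the output of generate_execution_code has roughly this shape (an illustrative skeleton with simplified label names, written as a string in the style of the utility code below):

    try_finally_sketch = r"""
    /*try:*/ {
        ...                                       /* body; exits jump to the labels below */
    }
    /*finally:*/ {
        int __pyx_why;
        PyObject *exc_type, *exc_value, *exc_tb;  /* only if the error path is used */
        __pyx_why = 0; goto catch;                /* normal fall-through */
        break_lbl: __pyx_why = 1; goto catch;
        error_lbl: { __pyx_why = 2; PyErr_Fetch(&exc_type, &exc_value, &exc_tb); goto catch; }
        catch:
        ...                                       /* finally clause body */
        switch (__pyx_why) {
            case 1: goto outer_break;
            case 2: PyErr_Restore(exc_type, exc_value, exc_tb); goto outer_error;
        }
    }
    """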
+
+class GILStatNode(TryFinallyStatNode):
+ # 'with gil' or 'with nogil' statement
+ #
+ # state string 'gil' or 'nogil'
+
+ preserve_exception = 0
+
+ def __init__(self, pos, state, body):
+ self.state = state
+ TryFinallyStatNode.__init__(self, pos,
+ body = body,
+ finally_clause = GILExitNode(pos, state = state))
+
+ def analyse_expressions(self, env):
+ env.global_scope().gil_used = 1
+ was_nogil = env.nogil
+ env.nogil = 1
+ TryFinallyStatNode.analyse_expressions(self, env)
+ env.nogil = was_nogil
+
+ def gil_check(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ code.putln("/*with %s:*/ {" % self.state)
+ if self.state == 'gil':
+ code.putln("PyGILState_STATE _save = PyGILState_Ensure();")
+ else:
+ code.putln("PyThreadState *_save;")
+ code.putln("Py_UNBLOCK_THREADS")
+ TryFinallyStatNode.generate_execution_code(self, code)
+ code.putln("}")
+
+
+class GILExitNode(StatNode):
+ # Used as the 'finally' block in a GILStatNode
+ #
+ # state string 'gil' or 'nogil'
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ if self.state == 'gil':
+ code.putln("PyGILState_Release();")
+ else:
+ code.putln("Py_BLOCK_THREADS")
+
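GILStatNode reuses the try/finally machinery with a GILExitNode as the finally clause, so a "with nogil:" block compiles to roughly the following (illustrative only; the real output also carries the __pyx_why bookkeeping of its TryFinallyStatNode base class):

    with_nogil_sketch = r"""
    /*with nogil:*/ {
        PyThreadState *_save;
        Py_UNBLOCK_THREADS
        /*try:*/ {
            ...                  /* body, running without the GIL */
        }
        /*finally:*/ {
            Py_BLOCK_THREADS     /* emitted by GILExitNode */
        }
    }
    """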
+
+class CImportStatNode(StatNode):
+ # cimport statement
+ #
+ # module_name string Qualified name of module being imported
+ # as_name string or None Name specified in "as" clause, if any
+
+ def analyse_declarations(self, env):
+ module_scope = env.find_module(self.module_name, self.pos)
+ if "." in self.module_name:
+ names = self.module_name.split(".")
+ top_name = names[0]
+ top_module_scope = env.context.find_submodule(top_name)
+ module_scope = top_module_scope
+ for name in names[1:]:
+ submodule_scope = module_scope.find_submodule(name)
+ module_scope.declare_module(name, submodule_scope, self.pos)
+ if not self.as_name:
+ env.add_imported_module(submodule_scope)
+ module_scope = submodule_scope
+ if self.as_name:
+ env.declare_module(self.as_name, module_scope, self.pos)
+ env.add_imported_module(module_scope)
+ else:
+ env.declare_module(top_name, top_module_scope, self.pos)
+ env.add_imported_module(top_module_scope)
+ else:
+ name = self.as_name or self.module_name
+ env.declare_module(name, module_scope, self.pos)
+ env.add_imported_module(module_scope)
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class FromCImportStatNode(StatNode):
+ # from ... cimport statement
+ #
+ # module_name string Qualified name of module
+    #  imported_names   [Parsing.ImportedName]   Names to be imported
+
+ def analyse_declarations(self, env):
+ module_scope = env.find_module(self.module_name, self.pos)
+ env.add_imported_module(module_scope)
+ for imp in self.imported_names:
+ kind = imp.kind
+ #entry = module_scope.find(imp.name, imp.pos)
+ entry = module_scope.lookup(imp.name)
+ if entry:
+ if kind and not self.declaration_matches(entry, kind):
+                    entry.redeclared(imp.pos)
+ else:
+ if kind == 'struct' or kind == 'union':
+ entry = module_scope.declare_struct_or_union(imp.name,
+ kind = kind, scope = None, typedef_flag = 0, pos = imp.pos)
+ elif kind == 'class':
+ entry = module_scope.declare_c_class(imp.name, pos = imp.pos,
+ module_name = self.module_name)
+ else:
+ error(imp.pos, "Name '%s' not declared in module '%s'"
+ % (imp.name, self.module_name))
+ if entry:
+ local_name = imp.as_name or imp.name
+ env.add_imported_entry(local_name, entry, imp.pos)
+
+ def declaration_matches(self, entry, kind):
+ if not entry.is_type:
+ return 0
+ type = entry.type
+ if kind == 'class':
+ if not type.is_extension_type:
+ return 0
+ else:
+ if not type.is_struct_or_union:
+ return 0
+ if kind <> type.kind:
+ return 0
+ return 1
+
+ def analyse_expressions(self, env):
+ pass
+
+ def generate_execution_code(self, code):
+ pass
+
+
+class FromImportStatNode(StatNode):
+ # from ... import statement
+ #
+ # module ImportNode
+ # items [(string, NameNode)]
+ # #interned_items [(string, NameNode)]
+ # item PyTempNode used internally
+
+ def analyse_declarations(self, env):
+ for _, target in self.items:
+ target.analyse_target_declaration(env)
+
+ def analyse_expressions(self, env):
+ import ExprNodes
+ self.module.analyse_expressions(env)
+ self.item = ExprNodes.PyTempNode(self.pos, env)
+ self.item.allocate_temp(env)
+ #self.interned_items = []
+ for name, target in self.items:
+ #self.interned_items.append((env.intern(name), target))
+ target.analyse_target_expression(env, None)
+ self.module.release_temp(env)
+ self.item.release_temp(env)
+
+ def generate_execution_code(self, code):
+ self.module.generate_evaluation_code(code)
+ #for cname, target in self.interned_items:
+ for name, target in self.items:
+ cname = code.intern(name)
+ code.putln(
+ '%s = PyObject_GetAttr(%s, %s); if (!%s) %s' % (
+ self.item.result(),
+ self.module.py_result(),
+ cname,
+ self.item.result(),
+ code.error_goto(self.pos)))
+ target.generate_assignment_code(self.item, code)
+ self.module.generate_disposal_code(code)
+
+#------------------------------------------------------------------------------------
+#
+# Runtime support code
+#
+#------------------------------------------------------------------------------------
+
+#utility_function_predeclarations = \
+#"""
+#typedef struct {PyObject **p; char *s;} __Pyx_InternTabEntry; /*proto*/
+#typedef struct {PyObject **p; char *s; long n;} __Pyx_StringTabEntry; /*proto*/
+#"""
+
+utility_function_predeclarations = \
+"""
+typedef struct {PyObject **p; int i; char *s; long n;} __Pyx_StringTabEntry; /*proto*/
+"""
+
+#get_name_predeclaration = \
+#"static PyObject *__Pyx_GetName(PyObject *dict, char *name); /*proto*/"
+
+#get_name_interned_predeclaration = \
+#"static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/"
+
+#------------------------------------------------------------------------------------
+
+printing_utility_code = [
+"""
+static int __Pyx_PrintItem(PyObject *); /*proto*/
+static int __Pyx_PrintNewline(void); /*proto*/
+""",r"""
+static PyObject *__Pyx_GetStdout(void) {
+ PyObject *f = PySys_GetObject("stdout");
+ if (!f) {
+ PyErr_SetString(PyExc_RuntimeError, "lost sys.stdout");
+ }
+ return f;
+}
+
+static int __Pyx_PrintItem(PyObject *v) {
+ PyObject *f;
+
+ if (!(f = __Pyx_GetStdout()))
+ return -1;
+ if (PyFile_SoftSpace(f, 1)) {
+ if (PyFile_WriteString(" ", f) < 0)
+ return -1;
+ }
+ if (PyFile_WriteObject(v, f, Py_PRINT_RAW) < 0)
+ return -1;
+ if (PyString_Check(v)) {
+ char *s = PyString_AsString(v);
+ Py_ssize_t len = PyString_Size(v);
+ if (len > 0 &&
+ isspace(Py_CHARMASK(s[len-1])) &&
+ s[len-1] != ' ')
+ PyFile_SoftSpace(f, 0);
+ }
+ return 0;
+}
+
+static int __Pyx_PrintNewline(void) {
+ PyObject *f;
+
+ if (!(f = __Pyx_GetStdout()))
+ return -1;
+ if (PyFile_WriteString("\n", f) < 0)
+ return -1;
+ PyFile_SoftSpace(f, 0);
+ return 0;
+}
+"""]
+
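The two helpers above implement the Python 2 "print" soft-space rule: an item is preceded by a space unless it is the first thing on the line, and a string item that ends in whitespace other than a plain space suppresses the next soft space. A rough pure-Python analogue of __Pyx_PrintItem (an assumed equivalent for illustration only, not code the compiler emits):

    import sys

    def print_item(v, f = None):
        # Rough Python 2 analogue of __Pyx_PrintItem above.
        f = f or sys.stdout
        if getattr(f, 'softspace', 0):       # previous item left a pending space
            f.write(' ')
        f.softspace = 1
        f.write(str(v))
        if isinstance(v, str) and v and v[-1].isspace() and v[-1] != ' ':
            f.softspace = 0                  # e.g. the item ended with '\n' or '\t'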
+#------------------------------------------------------------------------------------
+
+# The following function is based on do_raise() from ceval.c.
+
+raise_utility_code = [
+"""
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+""","""
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) {
+ if (value == Py_None)
+ value = NULL;
+ if (tb == Py_None)
+ tb = NULL;
+ Py_XINCREF(type);
+ Py_XINCREF(value);
+ Py_XINCREF(tb);
+ if (tb && !PyTraceBack_Check(tb)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: arg 3 must be a traceback or None");
+ goto raise_error;
+ }
+ #if PY_VERSION_HEX < 0x02050000
+ if (!PyClass_Check(type))
+ #else
+ if (!PyType_Check(type))
+ #endif
+ {
+ /* Raising an instance. The value should be a dummy. */
+ if (value) {
+ PyErr_SetString(PyExc_TypeError,
+ "instance exception may not have a separate value");
+ goto raise_error;
+ }
+ /* Normalize to raise <class>, <instance> */
+ value = type;
+ #if PY_VERSION_HEX < 0x02050000
+ if (PyInstance_Check(type)) {
+ type = (PyObject*) ((PyInstanceObject*)type)->in_class;
+ Py_INCREF(type);
+ }
+ else {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: exception must be an old-style class or instance");
+ goto raise_error;
+ }
+ #else
+ type = (PyObject*) type->ob_type;
+ Py_INCREF(type);
+ if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: exception class must be a subclass of BaseException");
+ goto raise_error;
+ }
+ #endif
+ }
+ PyErr_Restore(type, value, tb);
+ return;
+raise_error:
+ Py_XDECREF(value);
+ Py_XDECREF(type);
+ Py_XDECREF(tb);
+ return;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+#reraise_utility_code = [
+#"""
+#static void __Pyx_ReRaise(void); /*proto*/
+#""","""
+#static void __Pyx_ReRaise(void) {
+# PyThreadState *tstate = PyThreadState_Get();
+# PyObject *type = tstate->exc_type;
+# PyObject *value = tstate->exc_value;
+# PyObject *tb = tstate->exc_traceback;
+# Py_XINCREF(type);
+# Py_XINCREF(value);
+# Py_XINCREF(tb);
+# PyErr_Restore(type, value, tb);
+#}
+#"""]
+
+#------------------------------------------------------------------------------------
+
+arg_type_test_utility_code = [
+"""
+static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name); /*proto*/
+""","""
+static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name) {
+ if (!type) {
+ PyErr_Format(PyExc_SystemError, "Missing type object");
+ return 0;
+ }
+ if ((none_allowed && obj == Py_None) || PyObject_TypeCheck(obj, type))
+ return 1;
+ PyErr_Format(PyExc_TypeError,
+ "Argument '%s' has incorrect type (expected %s, got %s)",
+ name, type->tp_name, obj->ob_type->tp_name);
+ return 0;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+#
+# __Pyx_GetStarArgs splits the args tuple and kwds dict into two parts
+# each, one part suitable for passing to PyArg_ParseTupleAndKeywords,
+# and the other containing any extra arguments. On success, replaces
+# the borrowed references *args and *kwds with references to a new
+# tuple and dict, and passes back new references in *args2 and *kwds2.
+# Does not touch any of its arguments on failure.
+#
+# Any of *kwds, args2 and kwds2 may be 0 (but not args or kwds). If
+# *kwds == 0, it is not changed. If kwds2 == 0 and *kwds != 0, a new
+# reference to the same dictionary is passed back in *kwds.
+#
+# If rqd_kwds is not 0, it is an array of booleans corresponding to the
+# names in kwd_list, indicating required keyword arguments. If any of
+# these are not present in kwds, an exception is raised.
+#
+
+get_starargs_utility_code = [
+"""
+static int __Pyx_GetStarArgs(PyObject **args, PyObject **kwds, char *kwd_list[], \
+ Py_ssize_t nargs, PyObject **args2, PyObject **kwds2, char rqd_kwds[]); /*proto*/
+""","""
+static int __Pyx_GetStarArgs(
+ PyObject **args,
+ PyObject **kwds,
+ char *kwd_list[],
+ Py_ssize_t nargs,
+ PyObject **args2,
+ PyObject **kwds2,
+ char rqd_kwds[])
+{
+ PyObject *x = 0, *args1 = 0, *kwds1 = 0;
+ int i;
+ char **p;
+
+ if (args2)
+ *args2 = 0;
+ if (kwds2)
+ *kwds2 = 0;
+
+ if (args2) {
+ args1 = PyTuple_GetSlice(*args, 0, nargs);
+ if (!args1)
+ goto bad;
+ *args2 = PyTuple_GetSlice(*args, nargs, PyTuple_GET_SIZE(*args));
+ if (!*args2)
+ goto bad;
+ }
+ else if (PyTuple_GET_SIZE(*args) > nargs) {
+ int m = nargs;
+ int n = PyTuple_GET_SIZE(*args);
+ PyErr_Format(PyExc_TypeError,
+ "function takes at most %d positional arguments (%d given)",
+ m, n);
+ goto bad;
+ }
+ else {
+ args1 = *args;
+ Py_INCREF(args1);
+ }
+
+ if (rqd_kwds && !*kwds)
+ for (i = 0, p = kwd_list; *p; i++, p++)
+ if (rqd_kwds[i])
+ goto missing_kwarg;
+
+ if (kwds2) {
+ if (*kwds) {
+ kwds1 = PyDict_New();
+ if (!kwds1)
+ goto bad;
+ *kwds2 = PyDict_Copy(*kwds);
+ if (!*kwds2)
+ goto bad;
+ for (i = 0, p = kwd_list; *p; i++, p++) {
+ x = PyDict_GetItemString(*kwds, *p);
+ if (x) {
+ if (PyDict_SetItemString(kwds1, *p, x) < 0)
+ goto bad;
+ if (PyDict_DelItemString(*kwds2, *p) < 0)
+ goto bad;
+ }
+ else if (rqd_kwds && rqd_kwds[i])
+ goto missing_kwarg;
+ }
+ }
+ else {
+ *kwds2 = PyDict_New();
+ if (!*kwds2)
+ goto bad;
+ }
+ }
+ else {
+ kwds1 = *kwds;
+ Py_XINCREF(kwds1);
+ if (rqd_kwds && *kwds)
+ for (i = 0, p = kwd_list; *p; i++, p++)
+ if (rqd_kwds[i] && !PyDict_GetItemString(*kwds, *p))
+ goto missing_kwarg;
+ }
+
+ *args = args1;
+ *kwds = kwds1;
+ return 0;
+missing_kwarg:
+ PyErr_Format(PyExc_TypeError,
+ "required keyword argument '%s' is missing", *p);
+bad:
+ Py_XDECREF(args1);
+ Py_XDECREF(kwds1);
+ if (args2) {
+ Py_XDECREF(*args2);
+ }
+ if (kwds2) {
+ Py_XDECREF(*kwds2);
+ }
+ return -1;
+}
+"""]
+
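The contract spelled out in the comment block above is easier to read as plain Python. A hypothetical analogue for the common case where both args2 and kwds2 are requested (illustration only, ignoring the rqd_kwds checks and reference counting):

    def split_star_args(args, kwds, kwd_list, nargs):
        # args[:nargs] and the declared keywords stay with the signature,
        # the surplus goes to *args / **kwds of the Pyrex function.
        args1, args2 = args[:nargs], args[nargs:]
        kwds = kwds or {}
        kwds1 = dict((k, v) for k, v in kwds.items() if k in kwd_list)
        kwds2 = dict((k, v) for k, v in kwds.items() if k not in kwd_list)
        return args1, kwds1, args2, kwds2

    # split_star_args((1, 2, 3), {'x': 4, 'y': 5}, ['x'], 2)
    # -> ((1, 2), {'x': 4}, (3,), {'y': 5})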
+#------------------------------------------------------------------------------------
+
+unraisable_exception_utility_code = [
+"""
+static void __Pyx_WriteUnraisable(char *name); /*proto*/
+""","""
+static void __Pyx_WriteUnraisable(char *name) {
+ PyObject *old_exc, *old_val, *old_tb;
+ PyObject *ctx;
+ PyGILState_STATE state = PyGILState_Ensure();
+ PyErr_Fetch(&old_exc, &old_val, &old_tb);
+ ctx = PyString_FromString(name);
+ PyErr_Restore(old_exc, old_val, old_tb);
+ if (!ctx)
+ ctx = Py_None;
+ PyErr_WriteUnraisable(ctx);
+ PyGILState_Release(state);
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+traceback_utility_code = [
+"""
+static void __Pyx_AddTraceback(char *funcname); /*proto*/
+""","""
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+
+static void __Pyx_AddTraceback(char *funcname) {
+ PyObject *py_srcfile = 0;
+ PyObject *py_funcname = 0;
+ PyObject *py_globals = 0;
+ PyObject *empty_tuple = 0;
+ PyObject *empty_string = 0;
+ PyCodeObject *py_code = 0;
+ PyFrameObject *py_frame = 0;
+
+ py_srcfile = PyString_FromString(%(FILENAME)s);
+ if (!py_srcfile) goto bad;
+ py_funcname = PyString_FromString(funcname);
+ if (!py_funcname) goto bad;
+ py_globals = PyModule_GetDict(%(GLOBALS)s);
+ if (!py_globals) goto bad;
+ empty_tuple = PyTuple_New(0);
+ if (!empty_tuple) goto bad;
+ empty_string = PyString_FromString("");
+ if (!empty_string) goto bad;
+ py_code = PyCode_New(
+ 0, /*int argcount,*/
+ 0, /*int nlocals,*/
+ 0, /*int stacksize,*/
+ 0, /*int flags,*/
+ empty_string, /*PyObject *code,*/
+ empty_tuple, /*PyObject *consts,*/
+ empty_tuple, /*PyObject *names,*/
+ empty_tuple, /*PyObject *varnames,*/
+ empty_tuple, /*PyObject *freevars,*/
+ empty_tuple, /*PyObject *cellvars,*/
+ py_srcfile, /*PyObject *filename,*/
+ py_funcname, /*PyObject *name,*/
+ %(LINENO)s, /*int firstlineno,*/
+ empty_string /*PyObject *lnotab*/
+ );
+ if (!py_code) goto bad;
+ py_frame = PyFrame_New(
+ PyThreadState_Get(), /*PyThreadState *tstate,*/
+ py_code, /*PyCodeObject *code,*/
+ py_globals, /*PyObject *globals,*/
+ 0 /*PyObject *locals*/
+ );
+ if (!py_frame) goto bad;
+ py_frame->f_lineno = %(LINENO)s;
+ PyTraceBack_Here(py_frame);
+bad:
+ Py_XDECREF(py_srcfile);
+ Py_XDECREF(py_funcname);
+ Py_XDECREF(empty_tuple);
+ Py_XDECREF(empty_string);
+ Py_XDECREF(py_code);
+ Py_XDECREF(py_frame);
+}
+""" % {
+ 'FILENAME': Naming.filename_cname,
+ 'LINENO': Naming.lineno_cname,
+ 'GLOBALS': Naming.module_cname
+}]
+
+#------------------------------------------------------------------------------------
+
+set_vtable_utility_code = [
+"""
+static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/
+""","""
+static int __Pyx_SetVtable(PyObject *dict, void *vtable) {
+ PyObject *pycobj = 0;
+ int result;
+
+ pycobj = PyCObject_FromVoidPtr(vtable, 0);
+ if (!pycobj)
+ goto bad;
+ if (PyDict_SetItemString(dict, "__pyx_vtable__", pycobj) < 0)
+ goto bad;
+ result = 0;
+ goto done;
+
+bad:
+ result = -1;
+done:
+ Py_XDECREF(pycobj);
+ return result;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+get_vtable_utility_code = [
+"""
+static int __Pyx_GetVtable(PyObject *dict, void *vtabptr); /*proto*/
+""",r"""
+static int __Pyx_GetVtable(PyObject *dict, void *vtabptr) {
+ int result;
+ PyObject *pycobj;
+
+ pycobj = PyMapping_GetItemString(dict, "__pyx_vtable__");
+ if (!pycobj)
+ goto bad;
+ *(void **)vtabptr = PyCObject_AsVoidPtr(pycobj);
+ if (!*(void **)vtabptr)
+ goto bad;
+ result = 0;
+ goto done;
+
+bad:
+ result = -1;
+done:
+ Py_XDECREF(pycobj);
+ return result;
+}
+"""]
+
+#------------------------------------------------------------------------------------
+
+#init_intern_tab_utility_code = [
+#"""
+#static int __Pyx_InternStrings(__Pyx_InternTabEntry *t); /*proto*/
+#""","""
+#static int __Pyx_InternStrings(__Pyx_InternTabEntry *t) {
+# while (t->p) {
+# *t->p = PyString_InternFromString(t->s);
+# if (!*t->p)
+# return -1;
+# ++t;
+# }
+# return 0;
+#}
+#"""]
+
+#init_intern_tab_utility_code = [
+#"""
+#static int __Pyx_InternStrings(PyObject **t[]); /*proto*/
+#""","""
+#static int __Pyx_InternStrings(PyObject **t[]) {
+# while (*t) {
+# PyString_InternInPlace(*t);
+# if (!**t)
+# return -1;
+# ++t;
+# }
+# return 0;
+#}
+#"""]
+
+#------------------------------------------------------------------------------------
+
+init_string_tab_utility_code = [
+"""
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+""","""
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+ while (t->p) {
+ *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+ if (!*t->p)
+ return -1;
+ if (t->i)
+ PyString_InternInPlace(t->p);
+ ++t;
+ }
+ return 0;
+}
+"""]
+
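Each __Pyx_StringTabEntry pairs a slot in the generated module with a literal's text; a non-zero i field asks for interning, and n is sizeof the C literal (hence the "t->n - 1" above). The table the module generator is presumed to emit looks roughly like this (hypothetical slot names, shown as a C fragment in the same string style as the rest of this section):

    string_tab_sketch = r"""
    static PyObject *__pyx_k1p;
    static PyObject *__pyx_k2p;
    static __Pyx_StringTabEntry __pyx_string_tab[] = {
        {&__pyx_k1p, 1, "spam", sizeof("spam")},              /* interned */
        {&__pyx_k2p, 0, "hello world", sizeof("hello world")},
        {0, 0, 0, 0}
    };
    /* ... __Pyx_InitStrings(__pyx_string_tab) is then called at module init */
    """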
+#------------------------------------------------------------------------------------
+
+#get_exception_utility_code = [
+#"""
+#static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+#""","""
+#static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) {
+# PyThreadState *tstate = PyThreadState_Get();
+# PyObject *old_type, *old_value, *old_tb;
+# PyErr_Fetch(type, value, tb);
+# PyErr_NormalizeException(type, value, tb);
+# if (PyErr_Occurred())
+# goto bad;
+# if (!*tb) {
+# printf("no traceback\n");
+# *tb = Py_None;
+# Py_INCREF(*tb);
+# }
+##if 1
+# Py_INCREF(*type);
+# Py_INCREF(*value);
+# Py_INCREF(*tb);
+# old_type = tstate->exc_type;
+# old_value = tstate->exc_value;
+# old_tb = tstate->exc_traceback;
+# tstate->exc_type = *type;
+# tstate->exc_value = *value;
+# tstate->exc_traceback = *tb;
+# Py_XDECREF(old_type);
+# Py_XDECREF(old_value);
+# Py_XDECREF(old_tb);
+##endif
+# return 0;
+#bad:
+# Py_XDECREF(*type);
+# Py_XDECREF(*value);
+# Py_XDECREF(*tb);
+# return -1;
+#}
+#"""]
+
+#------------------------------------------------------------------------------------
+
+#get_exception_utility_code = [
+#"""
+#static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+#""","""
+#static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) {
+# PyErr_Fetch(type, value, tb);
+# PyErr_NormalizeException(type, value, tb);
+# if (PyErr_Occurred())
+# goto bad;
+# if (!*tb) {
+# *tb = Py_None;
+# Py_INCREF(*tb);
+# }
+# return 0;
+#bad:
+# Py_XDECREF(*type);
+# Py_XDECREF(*value);
+# Py_XDECREF(*tb);
+# return -1;
+#}
+#"""]
+
+#------------------------------------------------------------------------------------
+
+normalize_exception_utility_code = [
+"""
+static int __Pyx_NormalizeException(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+""","""
+static int __Pyx_NormalizeException(PyObject **type, PyObject **value, PyObject **tb) {
+ PyErr_NormalizeException(type, value, tb);
+ if (PyErr_Occurred())
+ goto bad;
+ if (!*tb) {
+ *tb = Py_None;
+ Py_INCREF(*tb);
+ }
+ return 0;
+bad:
+ Py_XDECREF(*type);
+ Py_XDECREF(*value);
+ Py_XDECREF(*tb);
+ return -1;
+}
+"""]
+
+#------------------------------------------------------------------------------------
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Options.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Options.py
new file mode 100644
index 00000000..0754fa24
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Options.py
@@ -0,0 +1,5 @@
+#
+# Pyrex - Compilation-wide options
+#
+
+intern_names = 1 # Intern global variable and attribute names
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Parsing.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Parsing.py
new file mode 100644
index 00000000..b7b9e4b5
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Parsing.py
@@ -0,0 +1,2142 @@
+#
+# Pyrex Parser
+#
+
+import os, re
+from string import join, replace
+from types import ListType, TupleType
+from Scanning import PyrexScanner
+import Nodes
+import ExprNodes
+from ModuleNode import ModuleNode
+from Errors import warning, error, InternalError
+
+
+class Ctx(object):
+ # Parsing context
+ level = 'other'
+ visibility = 'private'
+ extern_from = False
+ cdef_flag = 0
+ cplus_flag = 0
+ typedef_flag = 0
+ api = 0
+ nogil = 0
+
+ def __init__(self, **kwds):
+ self.__dict__.update(kwds)
+
+ def __call__(self, **kwds):
+ ctx = Ctx()
+ d = ctx.__dict__
+ d.update(self.__dict__)
+ d.update(kwds)
+ return ctx
+
+ def cplus_check(self, pos):
+ #if self.visibility <> 'extern':
+ # error(pos, "C++ declarations must be 'extern'")
+ if self.cplus_flag and not self.extern_from:
+ error(pos, "C++ declarations must be in an 'extern from' block")
+
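Ctx objects are treated as immutable: __call__ clones the current context and applies keyword overrides, so nested constructs can tighten the context without disturbing the caller's copy. A small illustration (values are made up; the p_* functions below thread ctx through in exactly this way):

    module_ctx = Ctx(level = 'module')
    cdef_ctx = module_ctx(cdef_flag = 1, nogil = 1)
    # cdef_ctx.level == 'module', cdef_ctx.cdef_flag == 1
    # module_ctx is unchanged: module_ctx.cdef_flag == 0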
+
+def p_ident(s, message = "Expected an identifier"):
+ if s.sy == 'IDENT':
+ name = s.systring
+ s.next()
+ return name
+ else:
+ s.error(message)
+
+def p_ident_list(s):
+ names = []
+ while s.sy == 'IDENT':
+ names.append(s.systring)
+ s.next()
+ if s.sy <> ',':
+ break
+ s.next()
+ return names
+
+#------------------------------------------
+#
+# Expressions
+#
+#------------------------------------------
+
+def p_binop_expr(s, ops, p_sub_expr):
+ n1 = p_sub_expr(s)
+ while s.sy in ops:
+ op = s.sy
+ pos = s.position()
+ s.next()
+ n2 = p_sub_expr(s)
+ n1 = ExprNodes.binop_node(pos, op, n1, n2)
+ return n1
+
+#test: and_test ('or' and_test)* | lambdef
+
+def p_simple_expr(s):
+ return p_rassoc_binop_expr(s, ('or',), p_and_test)
+
+def p_rassoc_binop_expr(s, ops, p_subexpr):
+ n1 = p_subexpr(s)
+ if s.sy in ops:
+ pos = s.position()
+ op = s.sy
+ s.next()
+ n2 = p_rassoc_binop_expr(s, ops, p_subexpr)
+ n1 = ExprNodes.binop_node(pos, op, n1, n2)
+ return n1
+
+#and_test: not_test ('and' not_test)*
+
+def p_and_test(s):
+ #return p_binop_expr(s, ('and',), p_not_test)
+ return p_rassoc_binop_expr(s, ('and',), p_not_test)
+
+#not_test: 'not' not_test | comparison
+
+def p_not_test(s):
+ if s.sy == 'not':
+ pos = s.position()
+ s.next()
+ return ExprNodes.NotNode(pos, operand = p_not_test(s))
+ else:
+ return p_comparison(s)
+
+#comparison: expr (comp_op expr)*
+#comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+
+def p_comparison(s):
+ n1 = p_bit_expr(s)
+ if s.sy in comparison_ops:
+ pos = s.position()
+ op = p_cmp_op(s)
+ n2 = p_bit_expr(s)
+ n1 = ExprNodes.PrimaryCmpNode(pos,
+ operator = op, operand1 = n1, operand2 = n2)
+ if s.sy in comparison_ops:
+ n1.cascade = p_cascaded_cmp(s)
+ return n1
+
+def p_cascaded_cmp(s):
+ pos = s.position()
+ op = p_cmp_op(s)
+ n2 = p_bit_expr(s)
+ result = ExprNodes.CascadedCmpNode(pos,
+ operator = op, operand2 = n2)
+ if s.sy in comparison_ops:
+ result.cascade = p_cascaded_cmp(s)
+ return result
+
+def p_cmp_op(s):
+ if s.sy == 'not':
+ s.next()
+ s.expect('in')
+ op = 'not_in'
+ elif s.sy == 'is':
+ s.next()
+ if s.sy == 'not':
+ s.next()
+ op = 'is_not'
+ else:
+ op = 'is'
+ else:
+ op = s.sy
+ s.next()
+ if op == '<>':
+ op = '!='
+ return op
+
+comparison_ops = (
+ '<', '>', '==', '>=', '<=', '<>', '!=',
+ 'in', 'is', 'not'
+)
+
+#expr: xor_expr ('|' xor_expr)*
+
+def p_bit_expr(s):
+ return p_binop_expr(s, ('|',), p_xor_expr)
+
+#xor_expr: and_expr ('^' and_expr)*
+
+def p_xor_expr(s):
+ return p_binop_expr(s, ('^',), p_and_expr)
+
+#and_expr: shift_expr ('&' shift_expr)*
+
+def p_and_expr(s):
+ return p_binop_expr(s, ('&',), p_shift_expr)
+
+#shift_expr: arith_expr (('<<'|'>>') arith_expr)*
+
+def p_shift_expr(s):
+ return p_binop_expr(s, ('<<', '>>'), p_arith_expr)
+
+#arith_expr: term (('+'|'-') term)*
+
+def p_arith_expr(s):
+ return p_binop_expr(s, ('+', '-'), p_term)
+
+#term: factor (('*'|'/'|'%') factor)*
+
+def p_term(s):
+ return p_binop_expr(s, ('*', '/', '%'), p_factor)
+
+#factor: ('+'|'-'|'~'|'&'|typecast|sizeof) factor | power
+
+def p_factor(s):
+ sy = s.sy
+ if sy in ('+', '-', '~'):
+ op = s.sy
+ pos = s.position()
+ s.next()
+ return ExprNodes.unop_node(pos, op, p_factor(s))
+ elif sy == '&':
+ pos = s.position()
+ s.next()
+ arg = p_factor(s)
+ return ExprNodes.AmpersandNode(pos, operand = arg)
+ elif sy == "<":
+ return p_typecast(s)
+ elif sy == 'IDENT' and s.systring == "sizeof":
+ return p_sizeof(s)
+ else:
+ return p_power(s)
+
+def p_typecast(s):
+ # s.sy == "<"
+ pos = s.position()
+ s.next()
+ base_type = p_c_base_type(s)
+ declarator = p_c_declarator(s, empty = 1)
+ s.expect(">")
+ operand = p_factor(s)
+ return ExprNodes.TypecastNode(pos,
+ base_type = base_type,
+ declarator = declarator,
+ operand = operand)
+
+def p_sizeof(s):
+ # s.sy == ident "sizeof"
+ pos = s.position()
+ s.next()
+ s.expect('(')
+ if looking_at_type(s):
+ base_type = p_c_base_type(s)
+ declarator = p_c_declarator(s, empty = 1)
+ node = ExprNodes.SizeofTypeNode(pos,
+ base_type = base_type, declarator = declarator)
+ else:
+ operand = p_simple_expr(s)
+ node = ExprNodes.SizeofVarNode(pos, operand = operand)
+ s.expect(')')
+ return node
+
+#power: atom trailer* ('**' factor)*
+
+def p_power(s):
+ n1 = p_primitive(s)
+ if s.sy == '**':
+ pos = s.position()
+ s.next()
+ n2 = p_factor(s)
+ n1 = ExprNodes.binop_node(pos, '**', n1, n2)
+ return n1
+
+def p_primitive(s):
+ n = p_atom(s)
+ while s.sy in ('(', '[', '.'):
+ n = p_trailer(s, n)
+ return n
+
+#trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
+
+def p_trailer(s, node1):
+ pos = s.position()
+ if s.sy == '(':
+ return p_call(s, node1)
+ elif s.sy == '[':
+ return p_index(s, node1)
+ else: # s.sy == '.'
+ s.next()
+ name = p_ident(s)
+ return ExprNodes.AttributeNode(pos,
+ obj = node1, attribute = name)
+
+# arglist: argument (',' argument)* [',']
+# argument: [test '='] test # Really [keyword '='] test
+
+def p_call(s, function):
+ # s.sy == '('
+ pos = s.position()
+ s.next()
+ positional_args = []
+ keyword_args = []
+ star_arg = None
+ starstar_arg = None
+ while s.sy not in ('*', '**', ')'):
+ arg = p_simple_expr(s)
+ if s.sy == '=':
+ s.next()
+ if not arg.is_name:
+ s.error("Expected an identifier before '='",
+ pos = arg.pos)
+ keyword = ExprNodes.StringNode(arg.pos,
+ value = arg.name)
+ arg = p_simple_expr(s)
+ keyword_args.append((keyword, arg))
+ else:
+ if keyword_args:
+ s.error("Non-keyword arg following keyword arg",
+ pos = arg.pos)
+ positional_args.append(arg)
+ if s.sy <> ',':
+ break
+ s.next()
+ if s.sy == '*':
+ s.next()
+ star_arg = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ if s.sy == '**':
+ s.next()
+ starstar_arg = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ s.expect(')')
+ if not (keyword_args or star_arg or starstar_arg):
+ return ExprNodes.SimpleCallNode(pos,
+ function = function,
+ args = positional_args)
+ else:
+ arg_tuple = None
+ keyword_dict = None
+ if positional_args or not star_arg:
+ arg_tuple = ExprNodes.TupleNode(pos,
+ args = positional_args)
+ if star_arg:
+ star_arg_tuple = ExprNodes.AsTupleNode(pos, arg = star_arg)
+ if arg_tuple:
+ arg_tuple = ExprNodes.binop_node(pos,
+ operator = '+', operand1 = arg_tuple,
+ operand2 = star_arg_tuple)
+ else:
+ arg_tuple = star_arg_tuple
+ if keyword_args:
+ keyword_dict = ExprNodes.DictNode(pos,
+ key_value_pairs = keyword_args)
+ return ExprNodes.GeneralCallNode(pos,
+ function = function,
+ positional_args = arg_tuple,
+ keyword_args = keyword_dict,
+ starstar_arg = starstar_arg)
+
+#lambdef: 'lambda' [varargslist] ':' test
+
+#subscriptlist: subscript (',' subscript)* [',']
+
+def p_index(s, base):
+ # s.sy == '['
+ pos = s.position()
+ s.next()
+ subscripts = p_subscript_list(s)
+ if len(subscripts) == 1 and len(subscripts[0]) == 2:
+ start, stop = subscripts[0]
+ result = ExprNodes.SliceIndexNode(pos,
+ base = base, start = start, stop = stop)
+ else:
+ indexes = make_slice_nodes(pos, subscripts)
+ if len(indexes) == 1:
+ index = indexes[0]
+ else:
+ index = ExprNodes.TupleNode(pos, args = indexes)
+ result = ExprNodes.IndexNode(pos,
+ base = base, index = index)
+ s.expect(']')
+ return result
+
+def p_subscript_list(s):
+ items = [p_subscript(s)]
+ while s.sy == ',':
+ s.next()
+ if s.sy == ']':
+ break
+ items.append(p_subscript(s))
+ return items
+
+#subscript: '.' '.' '.' | test | [test] ':' [test] [':' [test]]
+
+def p_subscript(s):
+ # Parse a subscript and return a list of
+ # 1, 2 or 3 ExprNodes, depending on how
+ # many slice elements were encountered.
+ pos = s.position()
+ if s.sy == '.':
+ expect_ellipsis(s)
+ return [ExprNodes.EllipsisNode(pos)]
+ else:
+ start = p_slice_element(s, (':',))
+ if s.sy <> ':':
+ return [start]
+ s.next()
+ stop = p_slice_element(s, (':', ',', ']'))
+ if s.sy <> ':':
+ return [start, stop]
+ s.next()
+ step = p_slice_element(s, (':', ',', ']'))
+ return [start, stop, step]
+
+def p_slice_element(s, follow_set):
+ # Simple expression which may be missing iff
+ # it is followed by something in follow_set.
+ if s.sy not in follow_set:
+ return p_simple_expr(s)
+ else:
+ return None
+
+def expect_ellipsis(s):
+ s.expect('.')
+ s.expect('.')
+ s.expect('.')
+
+def make_slice_nodes(pos, subscripts):
+ # Convert a list of subscripts as returned
+ # by p_subscript_list into a list of ExprNodes,
+ # creating SliceNodes for elements with 2 or
+ # more components.
+ result = []
+ for subscript in subscripts:
+ if len(subscript) == 1:
+ result.append(subscript[0])
+ else:
+ result.append(make_slice_node(pos, *subscript))
+ return result
+
+def make_slice_node(pos, start, stop = None, step = None):
+ if not start:
+ start = ExprNodes.NoneNode(pos)
+ if not stop:
+ stop = ExprNodes.NoneNode(pos)
+ if not step:
+ step = ExprNodes.NoneNode(pos)
+ return ExprNodes.SliceNode(pos,
+ start = start, stop = stop, step = step)
+
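p_index, p_subscript and make_slice_nodes between them choose one of two slice representations: a lone start:stop subscript becomes a SliceIndexNode, while a stepped slice or a tuple of subscripts goes through make_slice_nodes and ends up as an IndexNode wrapping SliceNode/TupleNode objects, with NoneNode filling any omitted part. For example (illustrative mapping only, not executable on its own):

    # x[1:n]    -> SliceIndexNode(base = x, start = 1, stop = n)
    # x[1:n:2]  -> IndexNode(base = x, index = SliceNode(start = 1, stop = n, step = 2))
    # x[::2]    -> IndexNode(base = x, index = SliceNode(NoneNode, NoneNode, 2))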
+#atom: '(' [testlist] ')' | '[' [listmaker] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING+
+
+def p_atom(s):
+ pos = s.position()
+ sy = s.sy
+ if sy == '(':
+ s.next()
+ if s.sy == ')':
+ result = ExprNodes.TupleNode(pos, args = [])
+ else:
+ result = p_expr(s)
+ s.expect(')')
+ return result
+ elif sy == '[':
+ return p_list_maker(s)
+ elif sy == '{':
+ return p_dict_maker(s)
+ elif sy == '`':
+ return p_backquote_expr(s)
+ elif sy == 'INT':
+ value = s.systring
+ s.next()
+ return ExprNodes.IntNode(pos, value = value)
+ elif sy == 'LONG':
+ value = s.systring
+ s.next()
+ return ExprNodes.LongNode(pos, value = value)
+ elif sy == 'FLOAT':
+ value = s.systring
+ s.next()
+ return ExprNodes.FloatNode(pos, value = value)
+ elif sy == 'IMAG':
+ value = s.systring[:-1]
+ s.next()
+ return ExprNodes.ImagNode(pos, value = value)
+ elif sy == 'STRING' or sy == 'BEGIN_STRING':
+ kind, value = p_cat_string_literal(s)
+ if kind == 'c':
+ return ExprNodes.CharNode(pos, value = value)
+ else:
+ return ExprNodes.StringNode(pos, value = value)
+ elif sy == 'IDENT':
+ name = s.systring
+ s.next()
+ if name == "None":
+ return ExprNodes.NoneNode(pos)
+ elif name == "new" and s.sy == 'IDENT':
+ return p_new_call(s)
+ else:
+ return p_name_atom(s, name)
+ elif sy == 'NULL':
+ s.next()
+ return ExprNodes.NullNode(pos)
+ else:
+ s.error("Expected an identifier or literal")
+
+def p_new_call(s):
+ node = p_primitive(s)
+ if isinstance(node, ExprNodes.SimpleCallNode):
+ node.is_new = 1
+ else:
+ error(s.position(), "'new' must be followed by a C++ constructor call")
+ return node
+
+def p_name(s):
+ if s.sy == 'IDENT':
+ pos = s.position()
+ name = s.systring
+ s.next()
+ return ExprNodes.NameNode(pos, name = name)
+ else:
+ s.error("Expected a variable name")
+
+def p_name_atom(s, name):
+ pos = s.position()
+ if not s.compile_time_expr:
+ try:
+ value = s.compile_time_env.lookup_here(name)
+ except KeyError:
+ pass
+ else:
+ rep = repr(value)
+ if isinstance(value, int):
+ return ExprNodes.IntNode(pos, value = rep)
+ elif isinstance(value, long):
+ return ExprNodes.LongNode(pos, value = rep)
+ elif isinstance(value, float):
+ return ExprNodes.FloatNode(pos, value = rep)
+ elif isinstance(value, str):
+ return ExprNodes.StringNode(pos, value = rep[1:-1])
+ else:
+ error(pos, "Invalid type for compile-time constant: %s"
+ % value.__class__.__name__)
+ return ExprNodes.NameNode(pos, name = name)
+
+def p_cat_string_literal(s):
+ # A sequence of one or more adjacent string literals.
+ # Returns (kind, value) where kind in ('', 'c', 'r')
+ kind, value = p_string_literal(s)
+ if kind <> 'c':
+ strings = [value]
+ while s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
+ next_kind, next_value = p_string_literal(s)
+ if next_kind == 'c':
+                s.error(
+ "Cannot concatenate char literal with another string or char literal")
+ strings.append(next_value)
+ value = ''.join(strings)
+ return kind, value
+
+def p_opt_string_literal(s):
+ if s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
+ return p_string_literal(s)
+ else:
+ return None
+
+def p_string_literal(s):
+ # A single string or char literal.
+ # Returns (kind, value) where kind in ('', 'c', 'r')
+ if s.sy == 'STRING':
+ value = unquote(s.systring)
+ s.next()
+        return '', value
+ # s.sy == 'BEGIN_STRING'
+ pos = s.position()
+ #is_raw = s.systring[:1].lower() == "r"
+ kind = s.systring[:1].lower()
+ if kind not in "cr":
+ kind = ''
+ chars = []
+ while 1:
+ s.next()
+ sy = s.sy
+ #print "p_string_literal: sy =", sy, repr(s.systring) ###
+ if sy == 'CHARS':
+ systr = s.systring
+ if len(systr) == 1 and systr in "'\"\n":
+ chars.append('\\')
+ chars.append(systr)
+ elif sy == 'ESCAPE':
+ systr = s.systring
+ if kind == 'r':
+ if systr == '\\\n':
+ chars.append(r'\\\n')
+ elif systr == r'\"':
+ chars.append(r'\\\"')
+ elif systr == r'\\':
+ chars.append(r'\\\\')
+ else:
+ chars.append('\\' + systr)
+ else:
+ c = systr[1]
+ if c in "'\"\\abfnrtv01234567":
+ chars.append(systr)
+ elif c == 'x':
+ chars.append('\\x0' + systr[2:])
+ elif c == '\n':
+ pass
+ else:
+ chars.append(r'\\' + systr[1:])
+ elif sy == 'NEWLINE':
+ chars.append(r'\n')
+ elif sy == 'END_STRING':
+ break
+ elif sy == 'EOF':
+ s.error("Unclosed string literal", pos = pos)
+ else:
+ s.error(
+ "Unexpected token %r:%r in string literal" %
+ (sy, s.systring))
+ s.next()
+ value = join(chars, '')
+ #print "p_string_literal: value =", repr(value) ###
+ return kind, value
+
+def unquote(s):
+ is_raw = 0
+ if s[:1].lower() == "r":
+ is_raw = 1
+ s = s[1:]
+ q = s[:3]
+ if q == '"""' or q == "'''":
+ s = s[3:-3]
+ else:
+ s = s[1:-1]
+ if is_raw:
+ s = s.replace('\\', '\\\\')
+ s = s.replace('\n', '\\\n')
+ else:
+ # Split into double quotes, newlines, escape sequences
+ # and spans of regular chars
+ l1 = re.split(r'((?:\\[0-7]{1,3})|(?:\\x[0-9A-Fa-f]{2})|(?:\\.)|(?:\\\n)|(?:\n)|")', s)
+ print "unquote: l1 =", l1 ###
+ l2 = []
+ for item in l1:
+ if item == '"' or item == '\n':
+ l2.append('\\' + item)
+ elif item == '\\\n':
+ pass
+ elif item[:1] == '\\':
+ if len(item) == 2:
+ if item[1] in '"\\abfnrtv':
+ l2.append(item)
+ else:
+ l2.append(item[1])
+ elif item[1:2] == 'x':
+ l2.append('\\x0' + item[2:])
+ else:
+ # octal escape
+ l2.append(item)
+ else:
+ l2.append(item)
+ s = "".join(l2)
+ return s
+
+def p_list_maker(s):
+ # s.sy == '['
+ pos = s.position()
+ s.next()
+ exprs = p_simple_expr_list(s)
+ s.expect(']')
+ return ExprNodes.ListNode(pos, args = exprs)
+
+#dictmaker: test ':' test (',' test ':' test)* [',']
+
+def p_dict_maker(s):
+ # s.sy == '{'
+ pos = s.position()
+ s.next()
+ items = []
+ while s.sy <> '}':
+ key = p_simple_expr(s)
+ s.expect(':')
+ value = p_simple_expr(s)
+ items.append((key, value))
+ if s.sy <> ',':
+ break
+ s.next()
+ s.expect('}')
+ return ExprNodes.DictNode(pos, key_value_pairs = items)
+
+def p_backquote_expr(s):
+ # s.sy == '`'
+ pos = s.position()
+ s.next()
+ arg = p_expr(s)
+ s.expect('`')
+ return ExprNodes.BackquoteNode(pos, arg = arg)
+
+#testlist: test (',' test)* [',']
+
+def p_simple_expr_list(s):
+ exprs = []
+ while s.sy not in expr_terminators:
+ exprs.append(p_simple_expr(s))
+ if s.sy <> ',':
+ break
+ s.next()
+ return exprs
+
+def p_expr(s):
+ pos = s.position()
+ expr = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ exprs = [expr] + p_simple_expr_list(s)
+ return ExprNodes.TupleNode(pos, args = exprs)
+ else:
+ return expr
+
+expr_terminators = (')', ']', '}', ':', '=', 'NEWLINE')
+
+#-------------------------------------------------------
+#
+# Statements
+#
+#-------------------------------------------------------
+
+def p_global_statement(s):
+ # assume s.sy == 'global'
+ pos = s.position()
+ s.next()
+ names = p_ident_list(s)
+ return Nodes.GlobalNode(pos, names = names)
+
+inplace_operators = ('+=', '-=', '*=', '/=', '%=', '**=',
+ '<<=', '>>=', '&=', '^=', '|=')
+
+def p_expression_or_assignment(s):
+ pos = s.position()
+ expr = p_expr(s)
+ if s.sy in inplace_operators:
+ return p_inplace_operation(s, expr)
+ elif s.sy <> '=':
+ if isinstance(expr, ExprNodes.StringNode):
+ return Nodes.PassStatNode(expr.pos)
+ else:
+ return Nodes.ExprStatNode(expr.pos, expr = expr)
+ else:
+ expr_list = [expr]
+ while s.sy == '=':
+ s.next()
+ expr_list.append(p_expr(s))
+ expr_list_list = []
+ flatten_parallel_assignments(expr_list, expr_list_list)
+ nodes = []
+ for expr_list in expr_list_list:
+ lhs_list = expr_list[:-1]
+ rhs = expr_list[-1]
+ if len(lhs_list) == 1:
+ node = Nodes.SingleAssignmentNode(rhs.pos,
+ lhs = lhs_list[0], rhs = rhs)
+ else:
+ node = Nodes.CascadedAssignmentNode(rhs.pos,
+ lhs_list = lhs_list, rhs = rhs)
+ nodes.append(node)
+ if len(nodes) == 1:
+ return nodes[0]
+ else:
+ return Nodes.ParallelAssignmentNode(nodes[0].pos, stats = nodes)
+
+def p_inplace_operation(s, lhs):
+ pos = s.position()
+ op = s.sy
+ s.next()
+ rhs = p_expr(s)
+ return Nodes.AugmentedAssignmentNode(pos, lhs = lhs, operator = op, rhs = rhs)
+
+def flatten_parallel_assignments(input, output):
+ # The input is a list of expression nodes, representing
+ # the LHSs and RHS of one (possibly cascaded) assignment
+ # statement. If they are all sequence constructors with
+ # the same number of arguments, rearranges them into a
+ # list of equivalent assignments between the individual
+ # elements. This transformation is applied recursively.
+ size = find_parallel_assignment_size(input)
+ if size >= 0:
+ for i in range(size):
+ new_exprs = [expr.args[i] for expr in input]
+ flatten_parallel_assignments(new_exprs, output)
+ else:
+ output.append(input)
+
+def find_parallel_assignment_size(input):
+ # The input is a list of expression nodes. If
+ # they are all sequence constructors with the same number
+ # of arguments, return that number, else return -1.
+ # Produces an error message if they are all sequence
+ # constructors but not all the same size.
+ for expr in input:
+ if not expr.is_sequence_constructor:
+ return -1
+ rhs = input[-1]
+ rhs_size = len(rhs.args)
+ for lhs in input[:-1]:
+ lhs_size = len(lhs.args)
+ if lhs_size <> rhs_size:
+ error(lhs.pos, "Unpacking sequence of wrong size (expected %d, got %d)"
+ % (lhs_size, rhs_size))
+ return -1
+ return rhs_size
+
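For a cascaded parallel assignment such as "a, b = c, d = 1, 2" the parser collects the three tuples as one expr_list, and flatten_parallel_assignments splits them column by column. The sketch below is a hypothetical analogue over plain tuples (the real function walks ExprNode.args and reports size mismatches, which this sketch skips):

    def flatten(exprs, output):
        # Same shape as flatten_parallel_assignments, but over plain tuples.
        sizes = set(len(e) for e in exprs if isinstance(e, tuple))
        if len(sizes) == 1 and all(isinstance(e, tuple) for e in exprs):
            size = sizes.pop()
            for i in range(size):
                flatten([e[i] for e in exprs], output)
        else:
            output.append(exprs)

    out = []
    flatten([('a', 'b'), ('c', 'd'), (1, 2)], out)
    # out == [['a', 'c', 1], ['b', 'd', 2]]
    # -> two cascaded assignments: a = c = 1 and b = d = 2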
+def p_print_statement(s):
+ # s.sy == 'print'
+ pos = s.position()
+ s.next()
+ if s.sy == '>>':
+ s.error("'print >>' not yet implemented")
+ args = []
+ ewc = 0
+ if s.sy not in ('NEWLINE', 'EOF'):
+ args.append(p_simple_expr(s))
+ while s.sy == ',':
+ s.next()
+ if s.sy in ('NEWLINE', 'EOF'):
+ ewc = 1
+ break
+ args.append(p_simple_expr(s))
+ return Nodes.PrintStatNode(pos,
+ args = args, ends_with_comma = ewc)
+
+def p_del_statement(s):
+ # s.sy == 'del'
+ pos = s.position()
+ s.next()
+ args = p_simple_expr_list(s)
+ return Nodes.DelStatNode(pos, args = args)
+
+def p_pass_statement(s, with_newline = 0):
+ pos = s.position()
+ s.expect('pass')
+ if with_newline:
+ s.expect_newline("Expected a newline")
+ return Nodes.PassStatNode(pos)
+
+def p_break_statement(s):
+ # s.sy == 'break'
+ pos = s.position()
+ s.next()
+ return Nodes.BreakStatNode(pos)
+
+def p_continue_statement(s):
+ # s.sy == 'continue'
+ pos = s.position()
+ s.next()
+ return Nodes.ContinueStatNode(pos)
+
+def p_return_statement(s):
+ # s.sy == 'return'
+ pos = s.position()
+ s.next()
+ if s.sy not in statement_terminators:
+ value = p_expr(s)
+ else:
+ value = None
+ return Nodes.ReturnStatNode(pos, value = value)
+
+def p_raise_statement(s):
+ # s.sy == 'raise'
+ pos = s.position()
+ s.next()
+ exc_type = None
+ exc_value = None
+ exc_tb = None
+ if s.sy not in statement_terminators:
+ exc_type = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ exc_value = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ exc_tb = p_simple_expr(s)
+ if exc_type or exc_value or exc_tb:
+ return Nodes.RaiseStatNode(pos,
+ exc_type = exc_type,
+ exc_value = exc_value,
+ exc_tb = exc_tb)
+ else:
+ return Nodes.ReraiseStatNode(pos)
+
+def p_import_statement(s):
+ # s.sy in ('import', 'cimport')
+ pos = s.position()
+ kind = s.sy
+ s.next()
+ items = [p_dotted_name(s, as_allowed = 1)]
+ while s.sy == ',':
+ s.next()
+ items.append(p_dotted_name(s, as_allowed = 1))
+ stats = []
+ for pos, target_name, dotted_name, as_name in items:
+ if kind == 'cimport':
+ stat = Nodes.CImportStatNode(pos,
+ module_name = dotted_name,
+ as_name = as_name)
+ else:
+ if as_name and "." in dotted_name:
+ name_list = ExprNodes.ListNode(pos, args = [
+ ExprNodes.StringNode(pos, value = "*")])
+ else:
+ name_list = None
+ stat = Nodes.SingleAssignmentNode(pos,
+ lhs = ExprNodes.NameNode(pos,
+ name = as_name or target_name),
+ rhs = ExprNodes.ImportNode(pos,
+ module_name = ExprNodes.StringNode(pos,
+ value = dotted_name),
+ name_list = name_list))
+ stats.append(stat)
+ return Nodes.StatListNode(pos, stats = stats)
+
+def p_from_import_statement(s, ctx):
+ # s.sy == 'from'
+ pos = s.position()
+ s.next()
+ (dotted_name_pos, _, dotted_name, _) = \
+ p_dotted_name(s, as_allowed = 0)
+ if s.sy in ('import', 'cimport'):
+ kind = s.sy
+ s.next()
+ else:
+ s.error("Expected 'import' or 'cimport'")
+ if kind == 'cimport' and ctx.level not in ('module', 'module_pxd'):
+ s.error("cimport statement not allowed in this context")
+ if s.sy == '*':
+ s.error("'import *' not supported")
+ is_cimport = kind == 'cimport'
+ imported_names = [p_imported_name(s, is_cimport)]
+ while s.sy == ',':
+ s.next()
+ imported_names.append(p_imported_name(s, is_cimport))
+ if kind == 'cimport':
+ for imp in imported_names:
+ local_name = imp.as_name or imp.name
+ s.add_type_name(local_name)
+ return Nodes.FromCImportStatNode(pos,
+ module_name = dotted_name,
+ imported_names = imported_names)
+ else:
+ imported_name_strings = []
+ items = []
+ for imp in imported_names:
+ imported_name_strings.append(
+ ExprNodes.StringNode(imp.pos, value = imp.name))
+ items.append(
+ (imp.name,
+ ExprNodes.NameNode(imp.pos,
+ name = imp.as_name or imp.name)))
+ import_list = ExprNodes.ListNode(
+ imported_names[0].pos, args = imported_name_strings)
+ return Nodes.FromImportStatNode(pos,
+ module = ExprNodes.ImportNode(dotted_name_pos,
+ module_name = ExprNodes.StringNode(dotted_name_pos,
+ value = dotted_name),
+ name_list = import_list),
+ items = items)
+
+class ImportedName(object):
+ # pos
+ # name
+ # as_name
+ # kind 'class', 'struct', 'union', None
+
+ def __init__(self, pos, name, as_name, kind):
+ self.pos = pos
+ self.name = name
+ self.as_name = as_name
+ self.kind = kind
+
+imported_name_kinds = ('class', 'struct', 'union')
+
+def p_imported_name(s, is_cimport):
+ pos = s.position()
+ kind = None
+ if is_cimport and s.systring in imported_name_kinds:
+ kind = s.systring
+ s.next()
+ name = p_ident(s)
+ as_name = p_as_name(s)
+ return ImportedName(pos, name, as_name, kind)
+
+def p_dotted_name(s, as_allowed):
+ pos = s.position()
+ target_name = p_ident(s)
+ as_name = None
+ names = [target_name]
+ while s.sy == '.':
+ s.next()
+ names.append(p_ident(s))
+ if as_allowed:
+ as_name = p_as_name(s)
+ return (pos, target_name, join(names, "."), as_name)
+
+def p_as_name(s):
+ if s.sy == 'IDENT' and s.systring == 'as':
+ s.next()
+ return p_ident(s)
+ else:
+ return None
+
+def p_assert_statement(s):
+ # s.sy == 'assert'
+ pos = s.position()
+ s.next()
+ cond = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ value = p_simple_expr(s)
+ else:
+ value = None
+ return Nodes.AssertStatNode(pos, cond = cond, value = value)
+
+statement_terminators = (';', 'NEWLINE', 'EOF')
+
+def p_if_statement(s):
+ # s.sy == 'if'
+ pos = s.position()
+ s.next()
+ if_clauses = [p_if_clause(s)]
+ while s.sy == 'elif':
+ s.next()
+ if_clauses.append(p_if_clause(s))
+ else_clause = p_else_clause(s)
+ return Nodes.IfStatNode(pos,
+ if_clauses = if_clauses, else_clause = else_clause)
+
+def p_if_clause(s):
+ pos = s.position()
+ test = p_simple_expr(s)
+ body = p_suite(s)
+ return Nodes.IfClauseNode(pos,
+ condition = test, body = body)
+
+def p_else_clause(s):
+ if s.sy == 'else':
+ s.next()
+ return p_suite(s)
+ else:
+ return None
+
+def p_while_statement(s):
+ # s.sy == 'while'
+ pos = s.position()
+ s.next()
+ test = p_simple_expr(s)
+ body = p_suite(s)
+ else_clause = p_else_clause(s)
+ return Nodes.WhileStatNode(pos,
+ condition = test, body = body,
+ else_clause = else_clause)
+
+def p_for_statement(s):
+ # s.sy == 'for'
+ pos = s.position()
+ s.next()
+ expr = p_for_expr(s)
+ if s.sy == 'in':
+ return p_standard_for_statement(s, expr)
+ elif s.sy in inequality_relations:
+ return p_integer_for_statement(s, expr)
+ elif s.sy == 'from':
+ #warning(pos, "Old-style integer for-loop is deprecated, use 'for x < i < y' instead")
+ return p_old_style_integer_for_statement(s, expr)
+ else:
+ s.error("Expected 'in' or an inequality relation")
+
+def p_standard_for_statement(s, target):
+ # s.sy == 'in'
+ s.next()
+ iterator = p_for_iterator(s)
+ body = p_suite(s)
+ else_clause = p_else_clause(s)
+ return Nodes.ForInStatNode(target.pos,
+ target = target,
+ iterator = iterator,
+ body = body,
+ else_clause = else_clause)
+
+def p_integer_for_statement(s, bound1):
+ rel1 = s.sy
+ s.next()
+ name_pos = s.position()
+ target = p_name(s)
+ rel2_pos = s.position()
+ rel2 = p_inequality_relation(s)
+ bound2 = p_bit_expr(s)
+ if rel1[0] <> rel2[0]:
+ error(rel2_pos,
+ "Relation directions in integer for-loop do not match")
+ body = p_suite(s)
+ else_clause = p_else_clause(s)
+ return Nodes.IntegerForStatNode(bound1.pos,
+ bound1 = bound1,
+ relation1 = rel1,
+ target = target,
+ relation2 = rel2,
+ bound2 = bound2,
+ body = body,
+ else_clause = else_clause)
+
+def p_old_style_integer_for_statement(s, target):
+ # s.sy == 'for'
+ s.next()
+ bound1 = p_bit_expr(s)
+ rel1 = p_inequality_relation(s)
+ name2_pos = s.position()
+ name2 = p_ident(s)
+ rel2_pos = s.position()
+ rel2 = p_inequality_relation(s)
+ bound2 = p_bit_expr(s)
+ if not target.is_name:
+ error(target.pos,
+ "Target of for-from statement must be a variable name")
+ elif name2 <> target.name:
+ error(name2_pos,
+ "Variable name in for-from range does not match target")
+ if rel1[0] <> rel2[0]:
+ error(rel2_pos,
+ "Relation directions in for-from do not match")
+ body = p_suite(s)
+ else_clause = p_else_clause(s)
+ return Nodes.IntegerForStatNode(bound1.pos,
+ bound1 = bound1,
+ relation1 = rel1,
+ target = target,
+ relation2 = rel2,
+ bound2 = bound2,
+ body = body,
+ else_clause = else_clause)
+
+def p_inequality_relation(s):
+ if s.sy in inequality_relations:
+ op = s.sy
+ s.next()
+ return op
+ else:
+ s.error("Expected one of '<', '<=', '>' '>='")
+
+inequality_relations = ('<', '<=', '>', '>=')
+
+def p_for_expr(s):
+ # Target of standard for-statement or first bound of integer for-statement
+ pos = s.position()
+ expr = p_bit_expr(s)
+ if s.sy == ',':
+ s.next()
+ exprs = [expr]
+ while s.sy <> 'in':
+ exprs.append(p_bit_expr(s))
+ if s.sy <> ',':
+ break
+ s.next()
+ return ExprNodes.TupleNode(pos, args = exprs)
+ else:
+ return expr
+
+def p_for_iterator(s):
+ pos = s.position()
+ expr = p_expr(s)
+ return ExprNodes.IteratorNode(pos, sequence = expr)
+
+def p_try_statement(s):
+ # s.sy == 'try'
+ pos = s.position()
+ s.next()
+ body = p_suite(s)
+ except_clauses = []
+ else_clause = None
+ if s.sy in ('except', 'else'):
+ while s.sy == 'except':
+ except_clauses.append(p_except_clause(s))
+ if s.sy == 'else':
+ s.next()
+ else_clause = p_suite(s)
+ return Nodes.TryExceptStatNode(pos,
+ body = body, except_clauses = except_clauses,
+ else_clause = else_clause)
+ elif s.sy == 'finally':
+ s.next()
+ finally_clause = p_suite(s)
+ return Nodes.TryFinallyStatNode(pos,
+ body = body, finally_clause = finally_clause)
+ else:
+ s.error("Expected 'except' or 'finally'")
+
+def p_except_clause(s):
+ # s.sy == 'except'
+ pos = s.position()
+ s.next()
+ exc_type = None
+ exc_value = None
+ tb_value = None
+ if s.sy <> ':':
+ exc_type = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ exc_value = p_simple_expr(s)
+ if s.sy == ',':
+ s.next()
+ tb_value = p_simple_expr(s)
+ body = p_suite(s)
+ return Nodes.ExceptClauseNode(pos,
+ pattern = exc_type, exc_target = exc_value, tb_target = tb_value, body = body)
+
+def p_include_statement(s, ctx):
+ pos = s.position()
+ s.next() # 'include'
+ _, include_file_name = p_string_literal(s)
+ s.expect_newline("Syntax error in include statement")
+ if s.compile_time_eval:
+ include_file_path = s.context.find_include_file(include_file_name, pos)
+ if include_file_path:
+ s.included_files.append(include_file_name)
+ f = open(include_file_path, "rU")
+ s2 = PyrexScanner(f, include_file_path, parent_scanner = s)
+ try:
+ tree = p_statement_list(s2, ctx)
+ finally:
+ f.close()
+ return tree
+ else:
+ return None
+ else:
+ return Nodes.PassStatNode(pos)
+
+def p_with_statement(s):
+ pos = s.position()
+ s.next() # 'with'
+# if s.sy == 'IDENT' and s.systring in ('gil', 'nogil'):
+ if s.sy == 'IDENT' and s.systring == 'nogil':
+ state = s.systring
+ s.next()
+ body = p_suite(s)
+ return Nodes.GILStatNode(pos, state = state, body = body)
+ else:
+ s.error("Only 'with nogil' implemented")
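+
+# The only form currently accepted corresponds to source like (illustrative):
+#
+#     with nogil:
+#         <statements executed with the GIL released>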
+
+def p_simple_statement(s, ctx):
+ if s.sy == 'global':
+ node = p_global_statement(s)
+ elif s.sy == 'print':
+ node = p_print_statement(s)
+ elif s.sy == 'del':
+ node = p_del_statement(s)
+ elif s.sy == 'break':
+ node = p_break_statement(s)
+ elif s.sy == 'continue':
+ node = p_continue_statement(s)
+ elif s.sy == 'return':
+ node = p_return_statement(s)
+ elif s.sy == 'raise':
+ node = p_raise_statement(s)
+ elif s.sy == 'cimport':
+ if ctx.level not in ('module', 'module_pxd'):
+ s.error("cimport statement not allowed in this context")
+ node = p_import_statement(s)
+ elif s.sy == 'import':
+ node = p_import_statement(s)
+ elif s.sy == 'from':
+ node = p_from_import_statement(s, ctx)
+ elif s.sy == 'assert':
+ node = p_assert_statement(s)
+ elif s.sy == 'pass':
+ node = p_pass_statement(s)
+ else:
+ node = p_expression_or_assignment(s)
+ return node
+
+def p_simple_statement_list(s, ctx):
+ # Parse a series of simple statements on one line
+ # separated by semicolons.
+ stat = p_simple_statement(s, ctx)
+ if s.sy == ';':
+ stats = [stat]
+ while s.sy == ';':
+ s.next()
+ if s.sy in ('NEWLINE', 'EOF'):
+ break
+ stats.append(p_simple_statement(s, ctx))
+ stat = Nodes.StatListNode(stats[0].pos, stats = stats)
+ s.expect_newline("Syntax error in simple statement list")
+ return stat
+
+def p_compile_time_expr(s):
+ old = s.compile_time_expr
+ s.compile_time_expr = 1
+ expr = p_expr(s)
+ s.compile_time_expr = old
+ return expr
+
+def p_DEF_statement(s):
+ pos = s.position()
+ denv = s.compile_time_env
+ s.next() # 'DEF'
+ name = p_ident(s)
+ s.expect('=')
+ expr = p_compile_time_expr(s)
+ value = expr.compile_time_value(denv)
+ #print "p_DEF_statement: %s = %r" % (name, value) ###
+ denv.declare(name, value)
+ s.expect_newline()
+ return Nodes.PassStatNode(pos)
+
+def p_IF_statement(s, ctx):
+ pos = s.position()
+ saved_eval = s.compile_time_eval
+ current_eval = saved_eval
+ denv = s.compile_time_env
+ result = None
+ while 1:
+ s.next() # 'IF' or 'ELIF'
+ expr = p_compile_time_expr(s)
+ s.compile_time_eval = current_eval and bool(expr.compile_time_value(denv))
+ body = p_suite(s, ctx)
+ if s.compile_time_eval:
+ result = body
+ current_eval = 0
+ if s.sy <> 'ELIF':
+ break
+ if s.sy == 'ELSE':
+ s.next()
+ s.compile_time_eval = current_eval
+ body = p_suite(s, ctx)
+ if current_eval:
+ result = body
+ if not result:
+ result = Nodes.PassStatNode(pos)
+ s.compile_time_eval = saved_eval
+ return result
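+
+# Compile-time conditional source handled by the two functions above looks
+# roughly like this (BUFSIZE is an illustrative name; the UNAME_* constants
+# are declared in Scanning.initial_compile_time_env):
+#
+#     DEF BUFSIZE = 8192
+#     IF UNAME_SYSNAME == "Linux":
+#         cdef int use_epoll
+#     ELSE:
+#         pass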
+
+def p_statement(s, ctx):
+ pos = s.position()
+ cdef_flag = ctx.cdef_flag
+ if s.sy == 'ctypedef':
+ if ctx.level not in ('module', 'module_pxd'):
+ s.error("ctypedef statement not allowed here")
+ #if ctx.api:
+ # error(s.pos, "'api' not allowed with 'ctypedef'")
+ return p_ctypedef_statement(s, ctx)
+ elif s.sy == 'DEF':
+ return p_DEF_statement(s)
+ elif s.sy == 'IF':
+ return p_IF_statement(s, ctx)
+ else:
+ if s.sy == 'cdef':
+ cdef_flag = 1
+ s.next()
+ if s.sy == '+':
+ ctx = ctx(cplus_flag = 1)
+ s.next()
+ if cdef_flag:
+ if ctx.level not in ('module', 'module_pxd', 'function', 'c_class', 'c_class_pxd'):
+ s.error('cdef statement not allowed here')
+ return p_cdef_statement(s, ctx)
+ else:
+ if ctx.api:
+ error(s.pos, "'api' not allowed with this statement")
+ if s.sy == 'def':
+ if ctx.level not in ('module', 'class', 'c_class', 'property'):
+ s.error('def statement not allowed here')
+ return p_def_statement(s)
+ elif s.sy == 'class':
+ if ctx.level <> 'module':
+ s.error("class definition not allowed here")
+ return p_class_statement(s)
+ elif s.sy == 'include':
+ #if ctx.level not in ('module', 'module_pxd'):
+ # s.error("include statement not allowed here")
+ return p_include_statement(s, ctx)
+ elif ctx.level == 'c_class' and s.sy == 'IDENT' and s.systring == 'property':
+ return p_property_decl(s)
+ elif s.sy == 'pass' and ctx.level <> 'property':
+ return p_pass_statement(s, with_newline = 1)
+ else:
+ if ctx.level in ('c_class', 'c_class_pxd', 'property'):
+ s.error("Executable statement not allowed here")
+ if s.sy == 'if':
+ return p_if_statement(s)
+ elif s.sy == 'while':
+ return p_while_statement(s)
+ elif s.sy == 'for':
+ return p_for_statement(s)
+ elif s.sy == 'try':
+ return p_try_statement(s)
+ elif s.sy == 'with':
+ return p_with_statement(s)
+ else:
+ return p_simple_statement_list(s, ctx)
+
+def p_statement_list(s, ctx):
+ # Parse a series of statements separated by newlines.
+ pos = s.position()
+ stats = []
+ while s.sy not in ('DEDENT', 'EOF'):
+ stats.append(p_statement(s, ctx))
+ if len(stats) == 1:
+ return stats[0]
+ else:
+ return Nodes.StatListNode(pos, stats = stats)
+
+def p_suite(s, ctx = Ctx(), with_doc = 0, with_pseudo_doc = 0):
+ pos = s.position()
+ s.expect(':')
+ doc = None
+ stmts = []
+ if s.sy == 'NEWLINE':
+ s.next()
+ s.expect_indent()
+ if with_doc or with_pseudo_doc:
+ doc = p_doc_string(s)
+ body = p_statement_list(s, ctx)
+ s.expect_dedent()
+ else:
+ if ctx.api:
+ error(s.pos, "'api' not allowed with this statement")
+ if ctx.level in ('module', 'class', 'function', 'other'):
+ body = p_simple_statement_list(s, ctx)
+ else:
+ body = p_pass_statement(s)
+ s.expect_newline("Syntax error in declarations")
+ if with_doc:
+ return doc, body
+ else:
+ return body
+
+def p_c_base_type(s, self_flag = 0):
+ # If self_flag is true, this is the base type for the
+ # self argument of a C method of an extension type.
+ if s.sy == '(':
+ return p_c_complex_base_type(s)
+ else:
+ return p_c_simple_base_type(s, self_flag)
+
+def p_calling_convention(s):
+ if s.sy == 'IDENT' and s.systring in calling_convention_words:
+ result = s.systring
+ s.next()
+ return result
+ else:
+ return ""
+
+calling_convention_words = ("__stdcall", "__cdecl", "__fastcall")
+
+def p_c_complex_base_type(s):
+ # s.sy == '('
+ pos = s.position()
+ s.next()
+ base_type = p_c_base_type(s)
+ declarator = p_c_declarator(s, empty = 1)
+ s.expect(')')
+ return Nodes.CComplexBaseTypeNode(pos,
+ base_type = base_type, declarator = declarator)
+
+def p_c_simple_base_type(s, self_flag):
+ #print "p_c_simple_base_type: self_flag =", self_flag
+ is_basic = 0
+ signed = 1
+ longness = 0
+ module_path = []
+ pos = s.position()
+ if looking_at_base_type(s):
+ #print "p_c_simple_base_type: looking_at_base_type at", s.position()
+ is_basic = 1
+ signed, longness = p_sign_and_longness(s)
+ if s.sy == 'IDENT' and s.systring in basic_c_type_names:
+ name = s.systring
+ s.next()
+ else:
+ name = 'int'
+ elif s.looking_at_type_name() or looking_at_dotted_name(s):
+ #print "p_c_simple_base_type: looking_at_type_name at", s.position()
+ name = s.systring
+ s.next()
+ while s.sy == '.':
+ module_path.append(name)
+ s.next()
+ name = p_ident(s)
+ else:
+ #print "p_c_simple_base_type: not looking at type at", s.position()
+ name = None
+ return Nodes.CSimpleBaseTypeNode(pos,
+ name = name, module_path = module_path,
+ is_basic_c_type = is_basic, signed = signed,
+ longness = longness, is_self_arg = self_flag)
+
+def looking_at_type(s):
+ return looking_at_base_type(s) or s.looking_at_type_name()
+
+def looking_at_base_type(s):
+ #print "looking_at_base_type?", s.sy, s.systring, s.position()
+ return s.sy == 'IDENT' and s.systring in base_type_start_words
+
+def looking_at_dotted_name(s):
+ if s.sy == 'IDENT':
+ name = s.systring
+ s.next()
+ result = s.sy == '.'
+ s.put_back('IDENT', name)
+ return result
+ else:
+ return 0
+
+basic_c_type_names = ("void", "char", "int", "float", "double") #,
+ #"size_t", "Py_ssize_t")
+
+sign_and_longness_words = ("short", "long", "signed", "unsigned")
+
+base_type_start_words = \
+ basic_c_type_names + sign_and_longness_words
+
+def p_sign_and_longness(s):
+ signed = 1
+ longness = 0
+ while s.sy == 'IDENT' and s.systring in sign_and_longness_words:
+ if s.systring == 'unsigned':
+ signed = 0
+ elif s.systring == 'signed':
+ signed = 2
+ elif s.systring == 'short':
+ longness = -1
+ elif s.systring == 'long':
+ longness += 1
+ s.next()
+ return signed, longness
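+
+# Worked examples of the (signed, longness) encoding returned above:
+#     "unsigned char"       ->  (0, 0)
+#     "short int"           ->  (1, -1)
+#     "unsigned long long"  ->  (0, 2)
+#     "signed int"          ->  (2, 0)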
+
+def p_opt_cname(s):
+ literal = p_opt_string_literal(s)
+ if literal:
+ _, cname = literal
+ else:
+ cname = None
+ return cname
+
+def p_c_declarator(s, ctx = Ctx(), empty = 0, is_type = 0, cmethod_flag = 0, nonempty = 0,
+ calling_convention_allowed = 0):
+ # If empty is true, the declarator must be empty. If nonempty is true,
+ # the declarator must be nonempty. Otherwise we don't care.
+ # If cmethod_flag is true, then if this declarator declares
+ # a function, it's a C method of an extension type.
+ pos = s.position()
+ if s.sy == '(':
+ s.next()
+ if s.sy == ')' or looking_at_type(s):
+ base = Nodes.CNameDeclaratorNode(pos, name = "", cname = None)
+ result = p_c_func_declarator(s, pos, ctx, base, cmethod_flag)
+ else:
+ result = p_c_declarator(s, ctx, empty = empty, is_type = is_type,
+ cmethod_flag = cmethod_flag, nonempty = nonempty,
+ calling_convention_allowed = 1)
+ s.expect(')')
+ else:
+ result = p_c_simple_declarator(s, ctx, empty, is_type, cmethod_flag, nonempty)
+ if not calling_convention_allowed and result.calling_convention and s.sy <> '(':
+ error(s.position(), "%s on something that is not a function"
+ % result.calling_convention)
+ while s.sy in ('[', '('):
+ pos = s.position()
+ if s.sy == '[':
+ result = p_c_array_declarator(s, result)
+ else: # sy == '('
+ s.next()
+ result = p_c_func_declarator(s, pos, ctx, result, cmethod_flag)
+ cmethod_flag = 0
+ return result
+
+def p_c_array_declarator(s, base):
+ pos = s.position()
+ s.next() # '['
+ if s.sy <> ']':
+ dim = p_expr(s)
+ else:
+ dim = None
+ s.expect(']')
+ return Nodes.CArrayDeclaratorNode(pos, base = base, dimension = dim)
+
+def p_c_func_declarator(s, pos, ctx, base, cmethod_flag):
+ # Opening paren has already been skipped
+ args = p_c_arg_list(s, ctx, cmethod_flag = cmethod_flag,
+ nonempty_declarators = 0)
+ ellipsis = p_optional_ellipsis(s)
+ s.expect(')')
+ nogil = p_nogil(s)
+ exc_val, exc_check = p_exception_value_clause(s)
+ with_gil = p_with_gil(s)
+ return Nodes.CFuncDeclaratorNode(pos,
+ base = base, args = args, has_varargs = ellipsis,
+ exception_value = exc_val, exception_check = exc_check,
+ nogil = nogil or ctx.nogil or with_gil, with_gil = with_gil)
+
+def p_c_simple_declarator(s, ctx, empty, is_type, cmethod_flag, nonempty):
+ pos = s.position()
+ calling_convention = p_calling_convention(s)
+ if s.sy == '*':
+ s.next()
+ base = p_c_declarator(s, ctx, empty = empty, is_type = is_type,
+ cmethod_flag = cmethod_flag, nonempty = nonempty)
+ result = Nodes.CPtrDeclaratorNode(pos,
+ base = base)
+ elif s.sy == '**': # scanner returns this as a single token
+ s.next()
+ base = p_c_declarator(s, ctx, empty = empty, is_type = is_type,
+ cmethod_flag = cmethod_flag, nonempty = nonempty)
+ result = Nodes.CPtrDeclaratorNode(pos,
+ base = Nodes.CPtrDeclaratorNode(pos,
+ base = base))
+ else:
+ if s.sy == 'IDENT':
+ name = s.systring
+ if is_type:
+ s.add_type_name(name)
+ if empty:
+ error(s.position(), "Declarator should be empty")
+ s.next()
+ cname = p_opt_cname(s)
+ else:
+ if nonempty:
+ error(s.position(), "Empty declarator")
+ name = ""
+ cname = None
+ result = Nodes.CNameDeclaratorNode(pos,
+ name = name, cname = cname)
+ result.calling_convention = calling_convention
+ return result
+
+def p_nogil(s):
+ if s.sy == 'IDENT' and s.systring == 'nogil':
+ s.next()
+ return 1
+ else:
+ return 0
+
+def p_with_gil(s):
+ if s.sy == 'with':
+ s.next()
+ s.expect_keyword('gil')
+ return 1
+ else:
+ return 0
+
+def p_exception_value_clause(s):
+ exc_val = None
+ exc_check = 0
+ if s.sy == 'except':
+ s.next()
+ if s.sy == '*':
+ exc_check = 1
+ s.next()
+ else:
+ if s.sy == '?':
+ exc_check = 1
+ s.next()
+ exc_val = p_simple_expr(s)
+ return exc_val, exc_check
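+
+# The clause parsed above appears in C function declarations such as
+# (illustrative):
+#     cdef int f(int x) except -1     # exc_val = -1,   exc_check = 0
+#     cdef int g(int x) except? -1    # exc_val = -1,   exc_check = 1
+#     cdef int h(int x) except *      # exc_val = None, exc_check = 1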
+
+c_arg_list_terminators = ('*', '**', '.', ')')
+
+def p_c_arg_list(s, ctx = Ctx(), in_pyfunc = 0, cmethod_flag = 0, nonempty_declarators = 0,
+ kw_only = 0):
+ # Comma-separated list of C argument declarations, possibly empty.
+ # May have a trailing comma.
+ args = []
+ is_self_arg = cmethod_flag
+ while s.sy not in c_arg_list_terminators:
+ args.append(p_c_arg_decl(s, ctx, in_pyfunc, is_self_arg,
+ nonempty = nonempty_declarators, kw_only = kw_only))
+ if s.sy <> ',':
+ break
+ s.next()
+ is_self_arg = 0
+ return args
+
+def p_optional_ellipsis(s):
+ if s.sy == '.':
+ expect_ellipsis(s)
+ return 1
+ else:
+ return 0
+
+def p_c_arg_decl(s, ctx, in_pyfunc, cmethod_flag = 0, nonempty = 0, kw_only = 0):
+ pos = s.position()
+ allow_none = None
+ #not_none = 0
+ default = None
+ base_type = p_c_base_type(s, cmethod_flag)
+ declarator = p_c_declarator(s, ctx, nonempty = nonempty)
+ if s.sy in ('or', 'not'):
+ or_not = s.sy
+ s.next()
+ if s.sy == 'IDENT' and s.systring == 'None':
+ s.next()
+ else:
+ s.error("Expected 'None'")
+ if not in_pyfunc:
+ error(pos, "'%s None' only allowed in Python functions" % or_not)
+ allow_none = or_not == 'or'
+ if s.sy == '=':
+ s.next()
+ default = p_simple_expr(s)
+ return Nodes.CArgDeclNode(pos,
+ base_type = base_type,
+ declarator = declarator,
+ allow_none = allow_none,
+ default = default,
+ kw_only = kw_only)
+
+def p_api(s):
+ if s.sy == 'IDENT' and s.systring == 'api':
+ s.next()
+ return 1
+ else:
+ return 0
+
+def p_cdef_statement(s, ctx):
+ ctx = ctx(cdef_flag = 1)
+ pos = s.position()
+ ctx.visibility = p_visibility(s, ctx.visibility)
+ ctx.api = ctx.api or p_api(s)
+ if ctx.api:
+ if ctx.visibility not in ('private', 'public'):
+            error(pos, "Cannot combine 'api' with '%s'" % ctx.visibility)
+ if ctx.visibility == 'extern' and s.sy == 'from':
+ return p_cdef_extern_block(s, pos, ctx)
+ if p_nogil(s):
+ ctx.nogil = 1
+ if s.sy == ':':
+ return p_cdef_block(s, ctx)
+ elif s.sy == 'class':
+ if ctx.level not in ('module', 'module_pxd'):
+ error(pos, "Extension type definition not allowed here")
+ #if api:
+ # error(pos, "'api' not allowed with extension class")
+ return p_c_class_definition(s, pos, ctx)
+ elif s.sy == 'IDENT' and s.systring in struct_union_or_enum:
+ if ctx.level not in ('module', 'module_pxd'):
+ error(pos, "C struct/union/enum definition not allowed here")
+ #if ctx.visibility == 'public':
+ # error(pos, "Public struct/union/enum definition not implemented")
+ #if ctx.api:
+ # error(pos, "'api' not allowed with '%s'" % s.systring)
+ if s.systring == "enum":
+ return p_c_enum_definition(s, pos, ctx)
+ else:
+ return p_c_struct_or_union_definition(s, pos, ctx)
+ elif s.sy == 'pass':
+ node = p_pass_statement(s)
+ s.expect_newline('Expected a newline')
+ return node
+ else:
+ return p_c_func_or_var_declaration(s, pos, ctx)
+
+def p_cdef_block(s, ctx):
+ return p_suite(s, ctx(cdef_flag = 1))
+
+def p_cdef_extern_block(s, pos, ctx):
+ include_file = None
+ s.expect('from')
+ if s.sy == '*':
+ s.next()
+ else:
+ _, include_file = p_string_literal(s)
+ ctx = ctx(cdef_flag = 1, visibility = 'extern', extern_from = True)
+ if p_nogil(s):
+ ctx.nogil = 1
+ body = p_suite(s, ctx)
+ return Nodes.CDefExternNode(pos,
+ include_file = include_file,
+ body = body)
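+
+# Handles external declaration blocks of the form (illustrative):
+#
+#     cdef extern from "stdio.h":
+#         int printf(char *format, ...)
+#
+#     cdef extern from *:
+#         void *malloc(int size)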
+
+struct_union_or_enum = (
+ "struct", "union", "enum"
+)
+
+def p_c_enum_definition(s, pos, ctx):
+ # s.sy == ident 'enum'
+ s.next()
+ if s.sy == 'IDENT':
+ name = s.systring
+ s.next()
+ s.add_type_name(name)
+ cname = p_opt_cname(s)
+ else:
+ name = None
+ cname = None
+ items = None
+ s.expect(':')
+ items = []
+ if s.sy <> 'NEWLINE':
+ p_c_enum_line(s, items)
+ else:
+ s.next() # 'NEWLINE'
+ s.expect_indent()
+ while s.sy not in ('DEDENT', 'EOF'):
+ p_c_enum_line(s, items)
+ s.expect_dedent()
+ return Nodes.CEnumDefNode(pos, name = name, cname = cname, items = items,
+ typedef_flag = ctx.typedef_flag,
+ visibility = ctx.visibility,
+ in_pxd = ctx.level == 'module_pxd')
+
+def p_c_enum_line(s, items):
+ if s.sy <> 'pass':
+ p_c_enum_item(s, items)
+ while s.sy == ',':
+ s.next()
+ if s.sy in ('NEWLINE', 'EOF'):
+ break
+ p_c_enum_item(s, items)
+ else:
+ s.next()
+ s.expect_newline("Syntax error in enum item list")
+
+def p_c_enum_item(s, items):
+ pos = s.position()
+ name = p_ident(s)
+ cname = p_opt_cname(s)
+ value = None
+ if s.sy == '=':
+ s.next()
+ value = p_simple_expr(s)
+ items.append(Nodes.CEnumDefItemNode(pos,
+ name = name, cname = cname, value = value))
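+
+# Enum definitions handled by the three functions above look like
+# (illustrative):
+#
+#     cdef enum Colour:
+#         red = 1
+#         green, blue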
+
+def p_c_struct_or_union_definition(s, pos, ctx):
+ # s.sy == ident 'struct' or 'union'
+ ctx.cplus_check(pos)
+ kind = s.systring
+ s.next()
+ module_path, name = p_qualified_name(s)
+ bases = []
+ if s.sy == '(':
+ s.next()
+ while s.sy == 'IDENT':
+ bases.append(p_qualified_name(s))
+ if s.sy <> ',':
+ break
+ s.next()
+ s.expect(')')
+ if bases and not ctx.cplus_flag:
+        error(s.position(), "Only a C++ struct may have bases")
+ cname = p_opt_cname(s)
+ s.add_type_name(name)
+ attributes = None
+ if s.sy == ':':
+ s.next()
+ s.expect('NEWLINE')
+ s.expect_indent()
+ attributes = []
+ body_ctx = Ctx()
+ while s.sy <> 'DEDENT':
+ if s.sy <> 'pass':
+ attributes.append(p_c_func_or_var_declaration(s,
+ s.position(), body_ctx))
+ else:
+ s.next()
+ s.expect_newline("Expected a newline")
+ s.expect_dedent()
+ else:
+ s.expect_newline("Syntax error in struct or union definition")
+ return Nodes.CStructOrUnionDefNode(pos,
+ name = name, cname = cname, module_path = module_path,
+ kind = kind, attributes = attributes,
+ typedef_flag = ctx.typedef_flag,
+ visibility = ctx.visibility,
+ in_pxd = ctx.level == 'module_pxd',
+ cplus_flag = ctx.cplus_flag,
+ bases = bases)
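+
+# Struct/union definitions handled above look like (illustrative):
+#
+#     cdef struct Point:
+#         double x
+#         double y
+#
+#     cdef union IntOrFloat:
+#         int i
+#         float f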
+
+def p_visibility(s, prev_visibility):
+ pos = s.position()
+ visibility = prev_visibility
+ if s.sy == 'IDENT' and s.systring in ('extern', 'public', 'readonly'):
+ visibility = s.systring
+ if prev_visibility <> 'private' and visibility <> prev_visibility:
+ s.error("Conflicting visibility options '%s' and '%s'"
+ % (prev_visibility, visibility))
+ s.next()
+ return visibility
+
+def p_c_func_or_var_declaration(s, pos, ctx):
+ cmethod_flag = ctx.level in ('c_class', 'c_class_pxd')
+ base_type = p_c_base_type(s)
+ declarator = p_c_declarator(s, ctx, cmethod_flag = cmethod_flag, nonempty = 1)
+ if s.sy == ':':
+ if ctx.level not in ('module', 'c_class'):
+ s.error("C function definition not allowed here")
+ suite = p_suite(s, Ctx(level = 'function'), with_pseudo_doc = 1)
+ result = Nodes.CFuncDefNode(pos,
+ visibility = ctx.visibility,
+ base_type = base_type,
+ declarator = declarator,
+ body = suite,
+ api = ctx.api)
+ else:
+ #if api:
+ # error(pos, "'api' not allowed with variable declaration")
+ declarators = [declarator]
+ while s.sy == ',':
+ s.next()
+ if s.sy == 'NEWLINE':
+ break
+ declarator = p_c_declarator(s, ctx, cmethod_flag = cmethod_flag, nonempty = 1)
+ declarators.append(declarator)
+ s.expect_newline("Syntax error in C variable declaration")
+ result = Nodes.CVarDefNode(pos,
+ visibility = ctx.visibility,
+ base_type = base_type,
+ declarators = declarators,
+ in_pxd = ctx.level == 'module_pxd',
+ api = ctx.api)
+ return result
+
+def p_ctypedef_statement(s, ctx):
+ # s.sy == 'ctypedef'
+ pos = s.position()
+ s.next()
+ visibility = p_visibility(s, ctx.visibility)
+ api = p_api(s)
+ ctx = ctx(typedef_flag = 1, visibility = visibility)
+ if api:
+ ctx.api = 1
+ if s.sy == 'class':
+ return p_c_class_definition(s, pos, ctx)
+ elif s.sy == 'IDENT' and s.systring in ('struct', 'union', 'enum'):
+ if s.systring == 'enum':
+ return p_c_enum_definition(s, pos, ctx)
+ else:
+ return p_c_struct_or_union_definition(s, pos, ctx)
+ else:
+ base_type = p_c_base_type(s)
+ declarator = p_c_declarator(s, ctx, is_type = 1, nonempty = 1)
+ s.expect_newline("Syntax error in ctypedef statement")
+ return Nodes.CTypeDefNode(pos,
+ base_type = base_type, declarator = declarator,
+ visibility = ctx.visibility,
+ in_pxd = ctx.level == 'module_pxd')
+
+def p_def_statement(s):
+ # s.sy == 'def'
+ pos = s.position()
+ s.next()
+ name = p_ident(s)
+ #args = []
+    s.expect('(')
+ args = p_c_arg_list(s, in_pyfunc = 1, nonempty_declarators = 1)
+ star_arg = None
+ starstar_arg = None
+ if s.sy == '*':
+ s.next()
+ if s.sy == 'IDENT':
+ star_arg = p_py_arg_decl(s)
+ if s.sy == ',':
+ s.next()
+ args.extend(p_c_arg_list(s, in_pyfunc = 1,
+ nonempty_declarators = 1, kw_only = 1))
+ elif s.sy <>')':
+ s.error("Syntax error in Python function argument list")
+ if s.sy == '**':
+ s.next()
+ starstar_arg = p_py_arg_decl(s)
+ s.expect(')')
+ if p_nogil(s):
+ error(s.pos, "Python function cannot be declared nogil")
+ doc, body = p_suite(s, Ctx(level = 'function'), with_doc = 1)
+ return Nodes.DefNode(pos, name = name, args = args,
+ star_arg = star_arg, starstar_arg = starstar_arg,
+ doc = doc, body = body)
+
+def p_py_arg_decl(s):
+ pos = s.position()
+ name = p_ident(s)
+ return Nodes.PyArgDeclNode(pos, name = name)
+
+def p_class_statement(s):
+ # s.sy == 'class'
+ pos = s.position()
+ s.next()
+ class_name = p_ident(s)
+ if s.sy == '(':
+ s.next()
+ base_list = p_simple_expr_list(s)
+ s.expect(')')
+ else:
+ base_list = []
+ doc, body = p_suite(s, Ctx(level = 'class'), with_doc = 1)
+ return Nodes.PyClassDefNode(pos,
+ name = class_name,
+ bases = ExprNodes.TupleNode(pos, args = base_list),
+ doc = doc, body = body)
+
+def p_qualified_name(s):
+ path = []
+ name = p_ident(s)
+ while s.sy == '.':
+ s.next()
+ path.append(name)
+ name = p_ident(s)
+ return path, name
+
+class CClassOptions:
+
+ objstruct_cname = None
+ typeobj_cname = None
+ no_gc = 0
+
+
+def p_c_class_definition(s, pos, ctx):
+ # s.sy == 'class'
+ s.next()
+ module_path, class_name = p_qualified_name(s)
+ if module_path and s.sy == 'IDENT' and s.systring == 'as':
+ s.next()
+ as_name = p_ident(s)
+ else:
+ as_name = class_name
+ s.add_type_name(as_name)
+ options = CClassOptions()
+ base_class_module = None
+ base_class_name = None
+ if s.sy == '(':
+ s.next()
+ base_class_path, base_class_name = p_qualified_name(s)
+ if s.sy == ',':
+ s.error("C class may only have one base class")
+ s.expect(')')
+ base_class_module = ".".join(base_class_path)
+ if s.sy == '[':
+ p_c_class_options(s, ctx, options)
+ if s.sy == ':':
+ if ctx.level == 'module_pxd':
+ body_level = 'c_class_pxd'
+ else:
+ body_level = 'c_class'
+ doc, body = p_suite(s, Ctx(level = body_level), with_doc = 1)
+ else:
+ s.expect_newline("Syntax error in C class definition")
+ doc = None
+ body = None
+ if ctx.visibility == 'extern':
+ if not module_path:
+ error(pos, "Module name required for 'extern' C class")
+ if options.typeobj_cname:
+ error(pos, "Type object name specification not allowed for 'extern' C class")
+ elif ctx.visibility == 'public':
+ if not options.objstruct_cname:
+ error(pos, "Object struct name specification required for 'public' C class")
+ if not options.typeobj_cname:
+ error(pos, "Type object name specification required for 'public' C class")
+ else:
+ if ctx.api:
+ error(pos, "Only 'public' C class can be declared 'api'")
+ return Nodes.CClassDefNode(pos,
+ visibility = ctx.visibility,
+ typedef_flag = ctx.typedef_flag,
+ api = ctx.api,
+ module_name = ".".join(module_path),
+ class_name = class_name,
+ as_name = as_name,
+ base_class_module = base_class_module,
+ base_class_name = base_class_name,
+ options = options,
+ in_pxd = ctx.level == 'module_pxd',
+ doc = doc,
+ body = body)
+
+def p_c_class_options(s, ctx, options):
+ s.expect('[')
+ while 1:
+ if s.sy <> 'IDENT':
+ break
+ if s.systring == 'object':
+ if ctx.visibility not in ('public', 'extern'):
+ error(s.position(), "Object name option only allowed for 'public' or 'extern' C class")
+ s.next()
+ options.objstruct_cname = p_ident(s)
+ elif s.systring == 'type':
+ if ctx.visibility not in ('public', 'extern'):
+ error(s.position(), "Type name option only allowed for 'public' or 'extern' C class")
+ s.next()
+ options.typeobj_cname = p_ident(s)
+ elif s.systring == 'nogc':
+ s.next()
+ options.no_gc = 1
+ else:
+ s.error("Unrecognised C class option '%s'" % s.systring)
+ if s.sy <> ',':
+ break
+ s.next()
+ s.expect(']', "Expected a C class option")
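+
+# The option list parsed above is written after the class name and optional
+# base-class clause of an extension type, e.g. (illustrative):
+#
+#     cdef extern class Mod.Spam [object SpamObject, type SpamType]:
+#         ...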
+
+def p_property_decl(s):
+ pos = s.position()
+ s.next() # 'property'
+ name = p_ident(s)
+ doc, body = p_suite(s, Ctx(level = 'property'), with_doc = 1)
+ return Nodes.PropertyNode(pos, name = name, doc = doc, body = body)
+
+def p_doc_string(s):
+ if s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
+ _, result = p_cat_string_literal(s)
+ if s.sy <> 'EOF':
+ s.expect_newline("Syntax error in doc string")
+ return result
+ else:
+ return None
+
+def p_module(s, pxd):
+ s.add_type_name("object")
+ pos = s.position()
+ doc = p_doc_string(s)
+ if pxd:
+ level = 'module_pxd'
+ else:
+ level = 'module'
+ body = p_statement_list(s, Ctx(level = level))
+ if s.sy <> 'EOF':
+ s.error("Syntax error in statement [%s,%s]" % (
+ repr(s.sy), repr(s.systring)))
+ return ModuleNode(pos, doc = doc, body = body)
+
+#----------------------------------------------
+#
+# Debugging
+#
+#----------------------------------------------
+
+def print_parse_tree(f, node, level, key = None):
+ ind = " " * level
+ if node:
+ f.write(ind)
+ if key:
+ f.write("%s: " % key)
+ t = type(node)
+ if t == TupleType:
+ f.write("(%s @ %s\n" % (node[0], node[1]))
+ for i in xrange(2, len(node)):
+ print_parse_tree(f, node[i], level+1)
+ f.write("%s)\n" % ind)
+ return
+ elif isinstance(node, Node):
+ try:
+ tag = node.tag
+ except AttributeError:
+ tag = node.__class__.__name__
+ f.write("%s @ %s\n" % (tag, node.pos))
+ for name, value in node.__dict__.items():
+ if name <> 'tag' and name <> 'pos':
+ print_parse_tree(f, value, level+1, name)
+ return
+ elif t == ListType:
+ f.write("[\n")
+ for i in xrange(len(node)):
+ print_parse_tree(f, node[i], level+1)
+ f.write("%s]\n" % ind)
+ return
+ f.write("%s%s\n" % (ind, node))
+
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/PyrexTypes.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/PyrexTypes.py
new file mode 100644
index 00000000..7d4f244a
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/PyrexTypes.py
@@ -0,0 +1,974 @@
+#
+# Pyrex - Types
+#
+
+import string
+import Naming
+
+class BaseType:
+ #
+ # Base class for all Pyrex types including pseudo-types.
+
+ def cast_code(self, expr_code):
+ return "((%s)%s)" % (self.declaration_code(""), expr_code)
+
+ def base_declaration_code(self, base_code, entity_code):
+ if entity_code:
+ return "%s %s" % (base_code, entity_code)
+ else:
+ return base_code
+
+
+class PyrexType(BaseType):
+ #
+ # Base class for all non-pseudo Pyrex types.
+ #
+ # is_pyobject boolean Is a Python object type
+ # is_extension_type boolean Is a Python extension type
+ # is_numeric boolean Is a C numeric type
+ # is_int boolean Is a C integer type
+ # is_float boolean Is a C floating point type
+ # is_void boolean Is the C void type
+ # is_array boolean Is a C array type
+ # is_ptr boolean Is a C pointer type
+ # is_null_ptr boolean Is the type of NULL
+ # is_cfunction boolean Is a C function type
+ # is_struct_or_union boolean Is a C struct or union type
+ # is_enum boolean Is a C enum type
+ # is_typedef boolean Is a typedef type
+ # is_string boolean Is a C char * type
+ # is_returncode boolean Is used only to signal exceptions
+ # is_sequence boolean Is a sequence type
+ # is_builtin boolean Is a built-in Python type
+ # is_error boolean Is the dummy error type
+ # has_attributes boolean Has C dot-selectable attributes
+ # default_value string Initial value
+ # parsetuple_format string Format char for PyArg_ParseTuple
+ # pymemberdef_typecode string Type code for PyMemberDef struct
+ #
+ # declaration_code(entity_code,
+ # for_display = 0, dll_linkage = None, pyrex = 0)
+ # Returns a code fragment for the declaration of an entity
+ # of this type, given a code fragment for the entity.
+ # * If for_display, this is for reading by a human in an error
+ # message; otherwise it must be valid C code.
+ # * If dll_linkage is not None, it must be 'DL_EXPORT' or
+ # 'DL_IMPORT', and will be added to the base type part of
+ # the declaration.
+ # * If pyrex = 1, this is for use in a 'cdef extern'
+ # statement of a Pyrex include file.
+ #
+ # assignable_from(src_type)
+ # Tests whether a variable of this type can be
+ # assigned a value of type src_type.
+ #
+ # same_as(other_type)
+ # Tests whether this type represents the same type
+ # as other_type.
+ #
+ # as_argument_type():
+ # Coerces array type into pointer type for use as
+ # a formal argument type.
+ #
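+    # For example (illustrative, using concrete types defined at the end of
+    # this module):
+    #     c_int_type.declaration_code("x")              ->  "int x"
+    #     c_ptr_type(c_int_type).declaration_code("p")  ->  "int *p"
+    #     c_double_type.assignable_from(c_int_type)     ->  true
+    #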
+
+ is_pyobject = 0
+ is_extension_type = 0
+ is_numeric = 0
+ is_int = 0
+ is_float = 0
+ is_void = 0
+ is_array = 0
+ is_ptr = 0
+ is_null_ptr = 0
+ is_cfunction = 0
+ is_struct_or_union = 0
+ is_enum = 0
+ is_typedef = 0
+ is_string = 0
+ is_returncode = 0
+ is_sequence = 0
+ is_builtin = 0
+ is_error = 0
+ has_attributes = 0
+ default_value = ""
+ parsetuple_format = ""
+ pymemberdef_typecode = None
+
+ def resolve(self):
+ # If a typedef, returns the base type.
+ return self
+
+ def literal_code(self, value):
+ # Returns a C code fragment representing a literal
+ # value of this type.
+ return str(value)
+
+ def __str__(self):
+ return string.strip(self.declaration_code("", for_display = 1))
+
+ def same_as(self, other_type, **kwds):
+ return self.same_as_resolved_type(other_type.resolve(), **kwds)
+
+ def same_as_resolved_type(self, other_type):
+ return self is other_type or other_type is error_type
+
+ def subtype_of(self, other_type):
+ return self.subtype_of_resolved_type(other_type.resolve())
+
+ def subtype_of_resolved_type(self, other_type):
+ return self.same_as(other_type)
+
+ def assignable_from(self, src_type):
+ return self.assignable_from_resolved_type(src_type.resolve())
+
+ def assignable_from_resolved_type(self, src_type):
+ return self.same_as(src_type)
+
+ def as_argument_type(self):
+ return self
+
+ def is_complete(self):
+ # A type is incomplete if it is an unsized array,
+ # a struct whose attributes are not defined, etc.
+ return 1
+
+
+class TypeWrapper(BaseType):
+ # Base class for pseudo-types that delegate most
+ # attribute lookups to another type.
+ #
+ # delegate_type PyrexType
+
+ def __init__(self, base_type):
+ self.delegate_type = base_type
+
+ def __getattr__(self, name):
+ return getattr(self.delegate_type, name)
+
+ def define(self, base_type):
+ self.delegate_type = base_type
+
+ def resolve(self):
+ return self.delegate_type.resolve()
+
+
+class CTypedefType(TypeWrapper):
+ #
+ # Pseudo-type defined with a ctypedef statement in a
+ # 'cdef extern from' block. Delegates most attribute
+ # lookups to the base type. ANYTHING NOT DEFINED
+ # HERE IS DELEGATED!
+ #
+ # qualified_name string
+ # typedef_cname string
+ # typedef_base_type PyrexType
+
+ is_typedef = 1
+
+ def __init__(self, cname, base_type):
+ TypeWrapper.__init__(self, base_type)
+ self.typedef_cname = cname
+ self.typedef_base_type = base_type
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ name = self.declaration_name(for_display, pyrex)
+ return self.base_declaration_code(name, entity_code)
+
+ def declaration_name(self, for_display = 0, pyrex = 0):
+ if pyrex or for_display:
+ return self.qualified_name
+ else:
+ return self.typedef_cname
+
+ def as_argument_type(self):
+ return self
+
+ def __repr__(self):
+ return "<CTypedefType %s>" % self.typedef_cname
+
+ def __str__(self):
+ return self.declaration_name(for_display = 1)
+
+
+class PyObjectType(PyrexType):
+ #
+ # Base class for all Python object types (reference-counted).
+ #
+
+ is_pyobject = 1
+ default_value = "0"
+ parsetuple_format = "O"
+ pymemberdef_typecode = "T_OBJECT"
+
+ def __str__(self):
+ return "Python object"
+
+ def __repr__(self):
+ return "<PyObjectType>"
+
+ def assignable_from(self, src_type):
+ return 1 # Conversion will be attempted
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ if pyrex or for_display:
+ return self.base_declaration_code("object", entity_code)
+ else:
+ return "%s *%s" % (public_decl("PyObject", dll_linkage), entity_code)
+
+
+class PyExtensionType(PyObjectType):
+ #
+ # A Python extension type.
+ #
+ # name string
+ # scope CClassScope Attribute namespace
+ # visibility string
+ # typedef_flag boolean
+ # base_type PyExtensionType or None
+ # module_name string or None Qualified name of defining module
+ # objstruct_cname string Name of PyObject struct
+ # typeobj_cname string or None C code fragment referring to type object
+ # typeptr_cname string or None Name of pointer to external type object
+ # vtabslot_cname string Name of C method table member
+ # vtabstruct_cname string Name of C method table struct
+ # vtabptr_cname string Name of pointer to C method table
+ # vtable_cname string Name of C method table definition
+
+ is_extension_type = 1
+ has_attributes = 1
+
+ def __init__(self, name, typedef_flag, base_type):
+ self.name = name
+ self.scope = None
+ self.typedef_flag = typedef_flag
+ self.base_type = base_type
+ self.module_name = None
+ self.objstruct_cname = None
+ self.typeobj_cname = None
+ self.typeptr_cname = None
+ self.vtabslot_cname = None
+ self.vtabstruct_cname = None
+ self.vtabptr_cname = None
+ self.vtable_cname = None
+ if base_type and base_type.is_sequence:
+ self.is_sequence = 1
+
+ def set_scope(self, scope):
+ self.scope = scope
+ if scope:
+ scope.parent_type = self
+
+ def subtype_of_resolved_type(self, other_type):
+ if other_type.is_extension_type:
+ return self is other_type or (
+ self.base_type and self.base_type.subtype_of(other_type))
+ else:
+ return other_type is py_object_type
+
+ def typeobj_is_available(self):
+ # Do we have a pointer to the type object?
+ return self.typeptr_cname
+
+ def typeobj_is_imported(self):
+ # If we don't know the C name of the type object but we do
+ # know which module it's defined in, it will be imported.
+ return self.typeobj_cname is None and self.module_name is not None
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ if pyrex or for_display:
+ return self.base_declaration_code(self.name, entity_code)
+ else:
+ if self.typedef_flag:
+ base_format = "%s"
+ else:
+ base_format = "struct %s"
+ base = public_decl(base_format % self.objstruct_cname, dll_linkage)
+ return "%s *%s" % (base, entity_code)
+
+ def attributes_known(self):
+ return self.scope is not None
+
+ def is_defined(self):
+ scope = self.scope
+ return scope and (scope.defined or scope.implemented)
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return "<PyExtensionType %s%s>" % (self.scope.class_name,
+ ("", " typedef")[self.typedef_flag])
+
+
+class CType(PyrexType):
+ #
+ # Base class for all C types (non-reference-counted).
+ #
+ # to_py_function string C function for converting to Python object
+ # from_py_function string C function for constructing from Python object
+ #
+
+ to_py_function = None
+ from_py_function = None
+
+
+class CVoidType(CType):
+ is_void = 1
+
+ def __repr__(self):
+ return "<CVoidType>"
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ base = public_decl("void", dll_linkage)
+ return self.base_declaration_code(base, entity_code)
+
+ def is_complete(self):
+ return 0
+
+
+class CNumericType(CType):
+ #
+ # Base class for all C numeric types.
+ #
+ # rank integer Relative size
+ # signed integer 0 = unsigned, 1 = unspecified, 2 = explicitly signed
+ # name string or None to construct from sign and rank
+ #
+
+ is_numeric = 1
+ default_value = "0"
+
+ parsetuple_formats = ( # rank -> format
+ "BHIk?K???", # unsigned
+ "bhil?Lfd?", # assumed signed
+ "bhil?Lfd?", # explicitly signed
+ )
+
+ sign_words = ("unsigned ", "", "signed ")
+
+ def __init__(self, rank, signed, name, pymemberdef_typecode = None):
+ self.rank = rank
+ self.signed = signed
+ self.name = name
+ ptf = self.parsetuple_formats[signed][rank]
+ if ptf == '?':
+ ptf = None
+ self.parsetuple_format = ptf
+ self.pymemberdef_typecode = pymemberdef_typecode
+
+ def sign_and_name(self):
+ return self.name
+
+ def __repr__(self):
+ return "<CNumericType %s>" % self.sign_and_name()
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ base = public_decl(self.sign_and_name(), dll_linkage)
+ return self.base_declaration_code(base, entity_code)
+
+
+class CIntType(CNumericType):
+
+ is_int = 1
+ typedef_flag = 0
+ to_py_function = "PyInt_FromLong"
+ from_py_function = "PyInt_AsLong"
+
+ def __init__(self, rank, signed, name, pymemberdef_typecode = None, is_returncode = 0):
+ CNumericType.__init__(self, rank, signed, name, pymemberdef_typecode)
+ self.is_returncode = is_returncode
+
+ def assignable_from_resolved_type(self, src_type):
+ return src_type.is_int or src_type.is_enum or src_type is error_type
+
+
+class CAnonEnumType(CIntType):
+
+ is_enum = 1
+
+
+class CUIntType(CIntType):
+
+ to_py_function = "PyLong_FromUnsignedLong"
+ from_py_function = "PyInt_AsUnsignedLongMask"
+
+
+class CULongType(CIntType):
+
+ to_py_function = "PyLong_FromUnsignedLong"
+ from_py_function = "PyInt_AsUnsignedLongMask"
+
+
+class CLongLongType(CIntType):
+
+ to_py_function = "PyLong_FromLongLong"
+ from_py_function = "PyInt_AsUnsignedLongLongMask"
+
+
+class CULongLongType(CIntType):
+
+ to_py_function = "PyLong_FromUnsignedLongLong"
+ from_py_function = "PyInt_AsUnsignedLongLongMask"
+
+
+class CPySSizeTType(CIntType):
+
+ to_py_function = "PyInt_FromSsize_t"
+ from_py_function = "PyInt_AsSsize_t"
+
+
+class CFloatType(CNumericType):
+
+ is_float = 1
+ to_py_function = "PyFloat_FromDouble"
+ from_py_function = "PyFloat_AsDouble"
+
+ def __init__(self, rank, name, pymemberdef_typecode = None):
+ CNumericType.__init__(self, rank, 1, name, pymemberdef_typecode)
+
+ def assignable_from_resolved_type(self, src_type):
+ return src_type.is_numeric or src_type is error_type
+
+
+class CArrayType(CType):
+ # base_type CType Element type
+ # size integer or None Number of elements
+
+ is_array = 1
+
+ def __init__(self, base_type, size):
+ self.base_type = base_type
+ self.size = size
+ if base_type is c_char_type:
+ self.is_string = 1
+
+ def __repr__(self):
+ return "<CArrayType %s %s>" % (self.size, repr(self.base_type))
+
+ def same_as_resolved_type(self, other_type):
+ return ((other_type.is_array and
+ self.base_type.same_as(other_type.base_type))
+ or other_type is error_type)
+
+ def assignable_from_resolved_type(self, src_type):
+ # Can't assign to a variable of an array type
+ return 0
+
+ def element_ptr_type(self):
+ return c_ptr_type(self.base_type)
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ if self.size is not None:
+ dimension_code = self.size
+ else:
+ dimension_code = ""
+ if entity_code.startswith("*"):
+ entity_code = "(%s)" % entity_code
+ return self.base_type.declaration_code(
+ "%s[%s]" % (entity_code, dimension_code),
+ for_display, dll_linkage, pyrex)
+
+ def as_argument_type(self):
+ return c_ptr_type(self.base_type)
+
+ def is_complete(self):
+ return self.size is not None
+
+
+class CPtrType(CType):
+ # base_type CType Referenced type
+
+ is_ptr = 1
+ default_value = "0"
+
+ def __init__(self, base_type):
+ self.base_type = base_type
+
+ def __repr__(self):
+ return "<CPtrType %s>" % repr(self.base_type)
+
+ def same_as_resolved_type(self, other_type):
+ return ((other_type.is_ptr and
+ self.base_type.same_as(other_type.base_type))
+ or other_type is error_type)
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ #print "CPtrType.declaration_code: pointer to", self.base_type ###
+ return self.base_type.declaration_code(
+ "*%s" % entity_code,
+ for_display, dll_linkage, pyrex)
+
+ def assignable_from_resolved_type(self, other_type):
+ if other_type is error_type:
+ return 1
+ if other_type.is_null_ptr:
+ return 1
+ if self.base_type.is_cfunction:
+ if other_type.is_ptr:
+ other_type = other_type.base_type.resolve()
+ if other_type.is_cfunction:
+ return self.base_type.pointer_assignable_from_resolved_type(other_type)
+ else:
+ return 0
+ if other_type.is_array or other_type.is_ptr:
+ return self.base_type.is_void or self.base_type.same_as(other_type.base_type)
+ return 0
+
+
+class CNullPtrType(CPtrType):
+
+ is_null_ptr = 1
+
+
+class CFuncType(CType):
+ # return_type CType
+ # args [CFuncTypeArg]
+ # has_varargs boolean
+ # exception_value string
+ # exception_check boolean True if PyErr_Occurred check needed
+ # calling_convention string Function calling convention
+ # nogil boolean Can be called without gil
+ # with_gil boolean Acquire gil around function body
+
+ is_cfunction = 1
+ is_overloaded = 0
+
+ def __init__(self, return_type, args, has_varargs = 0,
+ exception_value = None, exception_check = 0, calling_convention = "",
+ nogil = 0, with_gil = 0):
+ self.return_type = return_type
+ self.args = args
+ self.has_varargs = has_varargs
+ self.exception_value = exception_value
+ self.exception_check = exception_check
+ self.calling_convention = calling_convention
+ self.nogil = nogil
+ self.with_gil = with_gil
+
+ def __repr__(self):
+ arg_reprs = map(repr, self.args)
+ if self.has_varargs:
+ arg_reprs.append("...")
+ return "<CFuncType %s %s[%s]>" % (
+ repr(self.return_type),
+ self.calling_convention_prefix(),
+ string.join(arg_reprs, ","))
+
+ def callable_with(self, actual_arg_types):
+ formal_arg_types = self.args
+ nf = len(formal_arg_types)
+ na = len(actual_arg_types)
+        if not (nf == na or (self.has_varargs and na >= nf)):
+ return False
+ for formal_type, actual_type in zip(formal_arg_types, actual_arg_types):
+ if not formal_type.assignable_from(actual_type):
+ return False
+ return True
+
+ def calling_convention_prefix(self):
+ cc = self.calling_convention
+ if cc:
+ return cc + " "
+ else:
+ return ""
+
+ def same_c_signature_as(self, other_type, as_cmethod = 0):
+ return self.same_c_signature_as_resolved_type(
+ other_type.resolve(), as_cmethod)
+
+ def same_c_signature_as_resolved_type(self, other_type, as_cmethod = 0):
+ #print "CFuncType.same_c_signature_as_resolved_type:", \
+ # self, other_type, "as_cmethod =", as_cmethod ###
+ if other_type is error_type:
+ return 1
+ if not other_type.is_cfunction:
+ return 0
+ nargs = len(self.args)
+ if nargs <> len(other_type.args):
+ return 0
+ # When comparing C method signatures, the first argument
+ # is exempt from compatibility checking (the proper check
+ # is performed elsewhere).
+ for i in range(as_cmethod, nargs):
+ if not self.args[i].type.same_as(
+ other_type.args[i].type):
+ return 0
+ if self.has_varargs <> other_type.has_varargs:
+ return 0
+ if not self.return_type.same_as(other_type.return_type):
+ return 0
+ if not self.same_calling_convention_as(other_type):
+ return 0
+ return 1
+
+ def same_calling_convention_as(self, other):
+ return self.calling_convention == other.calling_convention
+
+ def same_exception_signature_as(self, other_type):
+ return self.same_exception_signature_as_resolved_type(
+ other_type.resolve())
+
+ def same_exception_signature_as_resolved_type(self, other_type):
+ return self.exception_value == other_type.exception_value \
+ and self.exception_check == other_type.exception_check
+
+ def same_as_resolved_type(self, other_type, as_cmethod = 0):
+ return self.same_c_signature_as_resolved_type(other_type, as_cmethod) \
+ and self.same_exception_signature_as_resolved_type(other_type) \
+ and self.nogil == other_type.nogil
+
+ def pointer_assignable_from_resolved_type(self, other_type):
+ return self.same_c_signature_as_resolved_type(other_type) \
+ and self.same_exception_signature_as_resolved_type(other_type) \
+ and not (self.nogil and not other_type.nogil)
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ arg_decl_list = []
+ for arg in self.args:
+ arg_decl_list.append(
+ arg.type.declaration_code("", for_display, pyrex = pyrex))
+ if self.has_varargs:
+ arg_decl_list.append("...")
+ arg_decl_code = string.join(arg_decl_list, ",")
+ if not arg_decl_code and not pyrex:
+ arg_decl_code = "void"
+ trailer = ""
+ if (pyrex or for_display) and not self.return_type.is_pyobject:
+ if self.exception_value and self.exception_check:
+ trailer = " except? %s" % self.exception_value
+ elif self.exception_value:
+ trailer = " except %s" % self.exception_value
+ elif self.exception_check:
+ trailer = " except *"
+ if self.nogil:
+ trailer += " nogil"
+ cc = self.calling_convention_prefix()
+ if (not entity_code and cc) or entity_code.startswith("*"):
+ entity_code = "(%s%s)" % (cc, entity_code)
+ cc = ""
+ return self.return_type.declaration_code(
+ "%s%s(%s)%s" % (cc, entity_code, arg_decl_code, trailer),
+ for_display, dll_linkage, pyrex)
+
+ def function_header_code(self, func_name, arg_code):
+ return "%s%s(%s)" % (self.calling_convention_prefix(),
+ func_name, arg_code)
+
+ def signature_string(self):
+ s = self.declaration_code("")
+ return s
+
+
+class COverloadedFuncType(CType):
+ # return_type CType
+ # signatures [CFuncType]
+
+ is_cfunction = 1
+ is_overloaded = 1
+
+ def __init__(self, return_type, signatures):
+ self.return_type = return_type
+ self.signatures = signatures
+
+ def __str__(self):
+ return "COverloadedFuncType(%s, [%s])" % (self.return_type,
+ ", ".join(map(str, self.signatures)))
+
+
+class CFuncTypeArg:
+ # name string
+ # cname string
+ # type PyrexType
+ # pos source file position
+
+ def __init__(self, name, type, pos):
+ self.name = name
+ self.cname = Naming.var_prefix + name
+ self.type = type
+ self.pos = pos
+
+ def __repr__(self):
+ return "%s:%s" % (self.name, repr(self.type))
+
+ def declaration_code(self, for_display = 0):
+ return self.type.declaration_code(self.cname, for_display)
+
+
+class CStructOrUnionType(CType):
+ # name string
+ # cname string
+ # kind string "struct" or "union"
+ # scope StructOrUnionScope, or None if incomplete
+ # typedef_flag boolean
+ # cplus_constructor_type COverloadedFuncType
+
+ is_struct_or_union = 1
+ has_attributes = 1
+
+ def __init__(self, name, kind, scope, typedef_flag, cname):
+ self.name = name
+ self.cname = cname
+ self.kind = kind
+ self.typedef_flag = typedef_flag
+ self.set_scope(scope)
+
+ def __repr__(self):
+ return "<CStructOrUnionType %s %s%s>" % (self.name, self.cname,
+ ("", " typedef")[self.typedef_flag])
+
+ def set_scope(self, scope):
+ self.scope = scope
+ if scope and scope.is_cplus:
+ self.cplus_constructor_type = COverloadedFuncType(self,
+ scope.cplus_constructors)
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ if pyrex:
+ return self.base_declaration_code(self.name, entity_code)
+ else:
+ if for_display:
+ base = self.name
+ elif self.typedef_flag:
+ base = self.cname
+ else:
+ base = "%s %s" % (self.kind, self.cname)
+ return self.base_declaration_code(public_decl(base, dll_linkage), entity_code)
+
+ def is_complete(self):
+ return self.scope is not None
+
+ def attributes_known(self):
+ return self.is_complete()
+
+
+class CEnumType(CType):
+ # name string
+ # cname string or None
+ # typedef_flag boolean
+
+ is_enum = 1
+ signed = 1
+ rank = -1 # Ranks below any integer type
+ to_py_function = "PyInt_FromLong"
+ from_py_function = "PyInt_AsLong"
+
+ def __init__(self, name, cname, typedef_flag):
+ self.name = name
+ self.cname = cname
+ self.values = []
+ self.typedef_flag = typedef_flag
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return "<CEnumType %s %s%s>" % (self.name, self.cname,
+ ("", " typedef")[self.typedef_flag])
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ if pyrex:
+ return self.base_declaration_code(self.cname, entity_code)
+ else:
+ if self.typedef_flag:
+ base = self.cname
+ else:
+ base = "enum %s" % self.cname
+ return self.base_declaration_code(public_decl(base, dll_linkage), entity_code)
+
+
+class CStringType:
+ # Mixin class for C string types.
+
+ is_string = 1
+
+ to_py_function = "PyString_FromString"
+ from_py_function = "PyString_AsString"
+
+ def literal_code(self, value):
+ return '"%s"' % value
+
+
+class CCharArrayType(CStringType, CArrayType):
+ # C 'char []' type.
+
+ parsetuple_format = "s"
+ pymemberdef_typecode = "T_STRING_INPLACE"
+
+ def __init__(self, size):
+ CArrayType.__init__(self, c_char_type, size)
+
+
+class CCharPtrType(CStringType, CPtrType):
+ # C 'char *' type.
+
+ parsetuple_format = "s"
+ pymemberdef_typecode = "T_STRING"
+
+ def __init__(self):
+ CPtrType.__init__(self, c_char_type)
+
+
+class ErrorType(PyrexType):
+ # Used to prevent propagation of error messages.
+
+ is_error = 1
+ exception_value = "0"
+ exception_check = 0
+ to_py_function = "dummy"
+ from_py_function = "dummy"
+ parsetuple_format = "E"
+
+ def declaration_code(self, entity_code,
+ for_display = 0, dll_linkage = None, pyrex = 0):
+ return "<error>"
+
+ def same_as_resolved_type(self, other_type):
+ return 1
+
+
+py_object_type = PyObjectType()
+py_type_type = TypeWrapper(None) # Bootstrapping placeholder, filled later
+
+c_void_type = CVoidType()
+c_void_ptr_type = CPtrType(c_void_type)
+c_void_ptr_ptr_type = CPtrType(c_void_ptr_type)
+
+c_uchar_type = CIntType(0, 0, "unsigned char", "T_UBYTE")
+c_ushort_type = CIntType(1, 0, "unsigned short", "T_USHORT")
+c_uint_type = CUIntType(2, 0, "unsigned int", "T_UINT")
+c_ulong_type = CULongType(3, 0, "unsigned long", "T_ULONG")
+c_size_t_type = CPySSizeTType(4, 0, "size_t")
+c_ulonglong_type = CULongLongType(5, 0, "unsigned PY_LONG_LONG", "T_ULONGLONG")
+
+c_char_type = CIntType(0, 1, "char", "T_CHAR")
+c_short_type = CIntType(1, 1, "short", "T_SHORT")
+c_int_type = CIntType(2, 1, "int", "T_INT")
+c_long_type = CIntType(3, 1, "long", "T_LONG")
+c_longlong_type = CLongLongType(5, 1, "PY_LONG_LONG", "T_LONGLONG")
+
+c_schar_type = CIntType(0, 2, "signed char", "T_CHAR")
+c_sshort_type = CIntType(1, 2, "signed short", "T_SHORT")
+c_sint_type = CIntType(2, 2, "signed int", "T_INT")
+c_slong_type = CIntType(3, 2, "signed long", "T_LONG")
+c_py_ssize_t_type = CPySSizeTType(4, 2, "Py_ssize_t")
+c_slonglong_type = CLongLongType(5, 2, "signed PY_LONG_LONG", "T_LONGLONG")
+
+c_float_type = CFloatType(6, "float", "T_FLOAT")
+c_double_type = CFloatType(7, "double", "T_DOUBLE")
+c_longdouble_type = CFloatType(8, "long double")
+
+c_null_ptr_type = CNullPtrType(c_void_type)
+c_char_array_type = CCharArrayType(None)
+c_char_ptr_type = CCharPtrType()
+c_char_ptr_ptr_type = CPtrType(c_char_ptr_type)
+c_int_ptr_type = CPtrType(c_int_type)
+
+c_returncode_type = CIntType(2, 1, "int", "T_INT", is_returncode = 1)
+
+c_anon_enum_type = CAnonEnumType(-1, 1, "<enum>")
+
+error_type = ErrorType()
+
+# Signedness values
+UNSIGNED = 0
+NOSIGN = 1
+SIGNED = 2
+
+# Longness values
+SHORT = -1
+NOLEN = 0
+LONG = 1
+LONGLONG = 2
+
+modifiers_and_name_to_type = {
+ #(signedness, longness, name)
+ (UNSIGNED, NOLEN, "char"): c_uchar_type,
+ (UNSIGNED, SHORT, "int"): c_ushort_type,
+ (UNSIGNED, NOLEN, "int"): c_uint_type,
+ (UNSIGNED, LONG, "int"): c_ulong_type,
+ (UNSIGNED, LONGLONG, "int"): c_ulonglong_type,
+ (NOSIGN, NOLEN, "void"): c_void_type,
+ (NOSIGN, NOLEN, "char"): c_char_type,
+ (NOSIGN, SHORT, "int"): c_short_type,
+ (NOSIGN, NOLEN, "int"): c_int_type,
+ #(NOSIGN, NOLEN, "size_t"): c_size_t_type,
+ #(NOSIGN, NOLEN, "Py_ssize_t"): c_py_ssize_t_type,
+ (NOSIGN, LONG, "int"): c_long_type,
+ (NOSIGN, LONGLONG, "int"): c_longlong_type,
+ (NOSIGN, NOLEN, "float"): c_float_type,
+ (NOSIGN, NOLEN, "double"): c_double_type,
+ (NOSIGN, LONG, "double"): c_longdouble_type,
+ (NOSIGN, NOLEN, "object"): py_object_type,
+ (SIGNED, NOLEN, "char"): c_schar_type,
+ (SIGNED, SHORT, "int"): c_sshort_type,
+ (SIGNED, NOLEN, "int"): c_sint_type,
+ (SIGNED, LONG, "int"): c_slong_type,
+ (SIGNED, LONGLONG, "int"): c_slonglong_type,
+}
+
+def widest_numeric_type(type1, type2):
+ # Given two numeric types, return the narrowest type
+ # encompassing both of them.
+ if type1.is_enum and type2.is_enum:
+ widest_type = c_int_type
+ elif type1.rank < type2.rank:
+ widest_type = type2
+ elif type1.rank > type2.rank:
+ widest_type = type1
+ elif type1.signed < type2.signed:
+ widest_type = type1
+ else:
+ widest_type = type2
+ return widest_type
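+
+# Worked examples of the rule above (illustrative):
+#     widest_numeric_type(c_int_type, c_double_type)  ->  c_double_type  (higher rank wins)
+#     widest_numeric_type(c_uint_type, c_int_type)    ->  c_uint_type    (equal rank, unsigned wins)
+#     widest_numeric_type(<enum>, <enum>)             ->  c_int_type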
+
+def simple_c_type(signed, longness, name):
+ # Find type descriptor for simple type given name and modifiers.
+ # Returns None if arguments don't make sense.
+ return modifiers_and_name_to_type.get((signed, longness, name))
+
+def c_array_type(base_type, size):
+ # Construct a C array type.
+ if base_type is c_char_type:
+ return CCharArrayType(size)
+ else:
+ return CArrayType(base_type, size)
+
+def c_ptr_type(base_type):
+ # Construct a C pointer type.
+ if base_type is c_char_type:
+ return c_char_ptr_type
+ else:
+ return CPtrType(base_type)
+
+def public_decl(base, dll_linkage):
+ if dll_linkage:
+ return "%s(%s)" % (dll_linkage, base)
+ else:
+ return base
+
+def same_type(type1, type2):
+ return type1.same_as(type2)
+
+def assignable_from(type1, type2):
+ return type1.assignable_from(type2)
+
+def typecast(to_type, from_type, expr_code):
+ # Return expr_code cast to a C type which can be
+ # assigned to to_type, assuming its existing C type
+ # is from_type.
+ if to_type is from_type or \
+ same_type(to_type, from_type) or \
+ (not to_type.is_pyobject and assignable_from(to_type, from_type)):
+ return expr_code
+ else:
+ return to_type.cast_code(expr_code)
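+
+# Minimal usage sketch (illustrative, traced from the definitions above):
+#     typecast(c_void_ptr_type, c_char_ptr_type, "p")  ->  "p"            (assignable, no cast)
+#     typecast(c_char_ptr_type, c_void_ptr_type, "p")  ->  "((char *)p)"
+#     typecast(c_int_type, py_object_type, "obj")      ->  "((int)obj)"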
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Scanning.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Scanning.py
new file mode 100644
index 00000000..1fc92f99
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Scanning.py
@@ -0,0 +1,390 @@
+#
+# Pyrex Scanner
+#
+
+#import pickle
+import cPickle as pickle
+
+import os
+import platform
+import stat
+import sys
+from time import time
+
+from Pyrex import Plex
+from Pyrex.Plex import Scanner
+from Pyrex.Plex.Errors import UnrecognizedInput
+from Errors import CompileError, error
+from Lexicon import string_prefixes, make_lexicon
+
+plex_version = getattr(Plex, '_version', None)
+#print "Plex version:", plex_version ###
+
+debug_scanner = 0
+trace_scanner = 0
+scanner_debug_flags = 0
+scanner_dump_file = None
+binary_lexicon_pickle = 1
+notify_lexicon_unpickling = 0
+notify_lexicon_pickling = 1
+
+lexicon = None
+
+#-----------------------------------------------------------------
+
+def hash_source_file(path):
+ # Try to calculate a hash code for the given source file.
+ # Returns an empty string if the file cannot be accessed.
+ #print "Hashing", path ###
+ try:
+ from hashlib import md5
+ except ImportError:
+ from md5 import new as md5
+    f = None
+    try:
+        try:
+            f = open(path, "rU")
+            text = f.read()
+        except IOError, e:
+            print "Unable to hash scanner source file (%s)" % e
+            return ""
+    finally:
+        if f:
+            f.close()
+ # Normalise spaces/tabs. We don't know what sort of
+ # space-tab substitution the file may have been
+ # through, so we replace all spans of spaces and
+ # tabs by a single space.
+ import re
+ text = re.sub("[ \t]+", " ", text)
+ hash = md5(text).hexdigest()
+ return hash
+
+def open_pickled_lexicon(expected_hash):
+ # Try to open pickled lexicon file and verify that
+ # it matches the source file. Returns the opened
+    # file if successful, otherwise None.
+ f = None
+ result = None
+ if os.path.exists(lexicon_pickle):
+ try:
+ f = open(lexicon_pickle, "rb")
+ actual_hash = pickle.load(f)
+ if actual_hash == expected_hash:
+ result = f
+ f = None
+ else:
+ print "Lexicon hash mismatch:" ###
+ print " expected", expected_hash ###
+ print " got ", actual_hash ###
+ except IOError, e:
+ print "Warning: Unable to read pickled lexicon", lexicon_pickle
+ print e
+ if f:
+ f.close()
+ return result
+
+def try_to_unpickle_lexicon():
+ global lexicon, lexicon_pickle, lexicon_hash
+ dir = os.path.dirname(__file__)
+ source_file = os.path.join(dir, "Lexicon.py")
+ lexicon_hash = hash_source_file(source_file)
+ lexicon_pickle = os.path.join(dir, "Lexicon.pickle")
+ f = open_pickled_lexicon(expected_hash = lexicon_hash)
+ if f:
+ if notify_lexicon_unpickling:
+ t0 = time()
+ print "Unpickling lexicon..."
+ lexicon = pickle.load(f)
+ f.close()
+ if notify_lexicon_unpickling:
+ t1 = time()
+ print "Done (%.2f seconds)" % (t1 - t0)
+
+def create_new_lexicon():
+ global lexicon
+ t0 = time()
+ print "Creating lexicon..."
+ lexicon = make_lexicon()
+ t1 = time()
+ print "Done (%.2f seconds)" % (t1 - t0)
+
+def pickle_lexicon():
+ f = None
+ try:
+ f = open(lexicon_pickle, "wb")
+ except IOError:
+ print "Warning: Unable to save pickled lexicon in", lexicon_pickle
+ if f:
+ if notify_lexicon_pickling:
+ t0 = time()
+ print "Pickling lexicon..."
+ pickle.dump(lexicon_hash, f, binary_lexicon_pickle)
+ pickle.dump(lexicon, f, binary_lexicon_pickle)
+ f.close()
+ if notify_lexicon_pickling:
+ t1 = time()
+ print "Done (%.2f seconds)" % (t1 - t0)
+
+def get_lexicon():
+ global lexicon
+ if not lexicon and plex_version is None:
+ try_to_unpickle_lexicon()
+ if not lexicon:
+ create_new_lexicon()
+ if plex_version is None:
+ pickle_lexicon()
+ return lexicon
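+
+# Example (sketch): callers never build the lexicon themselves; the
+# scanner constructor below simply does
+#
+#   Scanner.__init__(self, get_lexicon(), file, filename)
+#
+# and get_lexicon() reuses Lexicon.pickle whenever its stored hash still
+# matches the current Lexicon.py source.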
+
+#------------------------------------------------------------------
+
+reserved_words = [
+ "global", "include", "ctypedef", "cdef", "def", "class",
+ "print", "del", "pass", "break", "continue", "return",
+ "raise", "import", "exec", "try", "except", "finally",
+ "while", "if", "elif", "else", "for", "in", "assert",
+ "and", "or", "not", "is", "in", "lambda", "from",
+ "NULL", "cimport", "with", "DEF", "IF", "ELIF", "ELSE"
+]
+
+class Method:
+
+ def __init__(self, name):
+ self.name = name
+ self.__name__ = name # for Plex tracing
+
+ def __call__(self, stream, text):
+ return getattr(stream, self.name)(text)
+
+#------------------------------------------------------------------
+
+def build_resword_dict():
+ d = {}
+ for word in reserved_words:
+ d[word] = 1
+ return d
+
+#------------------------------------------------------------------
+
+class CompileTimeScope(object):
+
+ def __init__(self, outer = None):
+ self.entries = {}
+ self.outer = outer
+
+ def declare(self, name, value):
+ self.entries[name] = value
+
+ def lookup_here(self, name):
+ return self.entries[name]
+
+ def lookup(self, name):
+ try:
+ return self.lookup_here(name)
+ except KeyError:
+ outer = self.outer
+ if outer:
+ return outer.lookup(name)
+ else:
+ raise
+
+def initial_compile_time_env():
+ benv = CompileTimeScope()
+ names = ('UNAME_SYSNAME', 'UNAME_NODENAME', 'UNAME_RELEASE',
+ 'UNAME_VERSION', 'UNAME_MACHINE')
+ for name, value in zip(names, platform.uname()):
+ benv.declare(name, value)
+ import __builtin__
+ names = ('False', 'True',
+ 'abs', 'bool', 'chr', 'cmp', 'complex', 'dict', 'divmod', 'enumerate',
+ 'float', 'hash', 'hex', 'int', 'len', 'list', 'long', 'map', 'max', 'min',
+ 'oct', 'ord', 'pow', 'range', 'reduce', 'repr', 'round', 'slice', 'str',
+ 'sum', 'tuple', 'xrange', 'zip')
+ for name in names:
+ benv.declare(name, getattr(__builtin__, name))
+ denv = CompileTimeScope(benv)
+ return denv
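+
+# Example (sketch): DEF constants are declared in the per-module scope
+# returned above, and lookups fall back to the builtin layer beneath it:
+#
+#   env = initial_compile_time_env()
+#   env.declare('DEBUG', 1)
+#   env.lookup('DEBUG')           # -> 1, found in this scope
+#   env.lookup('UNAME_SYSNAME')   # -> found in the outer builtin scope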
+
+#------------------------------------------------------------------
+
+class PyrexScanner(Scanner):
+ # context Context Compilation context
+ # type_names set Identifiers to be treated as type names
+ # included_files [string] Files included with 'include' statement
+ # compile_time_env dict Environment for conditional compilation
+ # compile_time_eval boolean In a true conditional compilation context
+ # compile_time_expr boolean In a compile-time expression context
+
+ resword_dict = build_resword_dict()
+
+ def __init__(self, file, filename, parent_scanner = None,
+ scope = None, context = None):
+ Scanner.__init__(self, get_lexicon(), file, filename)
+ if parent_scanner:
+ self.context = parent_scanner.context
+ self.type_names = parent_scanner.type_names
+ self.included_files = parent_scanner.included_files
+ self.compile_time_env = parent_scanner.compile_time_env
+ self.compile_time_eval = parent_scanner.compile_time_eval
+ self.compile_time_expr = parent_scanner.compile_time_expr
+ else:
+ self.context = context
+ self.type_names = scope.type_names
+ self.included_files = scope.pyrex_include_files
+ self.compile_time_env = initial_compile_time_env()
+ self.compile_time_eval = 1
+ self.compile_time_expr = 0
+ self.trace = trace_scanner
+ self.indentation_stack = [0]
+ self.indentation_char = None
+ self.bracket_nesting_level = 0
+ self.begin('INDENT')
+ self.sy = ''
+ self.next()
+
+ def current_level(self):
+ return self.indentation_stack[-1]
+
+ def open_bracket_action(self, text):
+ self.bracket_nesting_level = self.bracket_nesting_level + 1
+ return text
+
+ def close_bracket_action(self, text):
+ self.bracket_nesting_level = self.bracket_nesting_level - 1
+ return text
+
+ def newline_action(self, text):
+ if self.bracket_nesting_level == 0:
+ self.begin('INDENT')
+ self.produce('NEWLINE', '')
+
+ string_states = {
+ "'": 'SQ_STRING',
+ '"': 'DQ_STRING',
+ "'''": 'TSQ_STRING',
+ '"""': 'TDQ_STRING'
+ }
+
+ def begin_string_action(self, text):
+ if text[:1] in string_prefixes:
+ text = text[1:]
+ self.begin(self.string_states[text])
+ self.produce('BEGIN_STRING')
+
+ def end_string_action(self, text):
+ self.begin('')
+ self.produce('END_STRING')
+
+ def unclosed_string_action(self, text):
+ self.end_string_action(text)
+ self.error("Unclosed string literal")
+
+ def indentation_action(self, text):
+ self.begin('')
+ # Indentation within brackets should be ignored.
+ #if self.bracket_nesting_level > 0:
+ # return
+ # Check that tabs and spaces are being used consistently.
+ if text:
+ c = text[0]
+ #print "Scanner.indentation_action: indent with", repr(c) ###
+ if self.indentation_char is None:
+ self.indentation_char = c
+ #print "Scanner.indentation_action: setting indent_char to", repr(c)
+ else:
+ if self.indentation_char <> c:
+ self.error("Mixed use of tabs and spaces")
+ if text.replace(c, "") <> "":
+ self.error("Mixed use of tabs and spaces")
+ # Figure out how many indents/dedents to do
+ current_level = self.current_level()
+ new_level = len(text)
+ #print "Changing indent level from", current_level, "to", new_level ###
+ if new_level == current_level:
+ return
+ elif new_level > current_level:
+ #print "...pushing level", new_level ###
+ self.indentation_stack.append(new_level)
+ self.produce('INDENT', '')
+ else:
+ while new_level < self.current_level():
+ #print "...popping level", self.indentation_stack[-1] ###
+ self.indentation_stack.pop()
+ self.produce('DEDENT', '')
+ #print "...current level now", self.current_level() ###
+ if new_level <> self.current_level():
+ self.error("Inconsistent indentation")
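+
+ # Example (sketch): for indentation levels 0 -> 4 -> 8 -> 4 -> 0 the
+ # stack evolves [0] -> [0,4] -> [0,4,8] -> [0,4] -> [0], producing
+ # INDENT, INDENT, DEDENT, DEDENT; dedenting to a level that is not on
+ # the stack (e.g. 6 after [0,4,8]) reports "Inconsistent indentation".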
+
+ def eof_action(self, text):
+ while len(self.indentation_stack) > 1:
+ self.produce('DEDENT', '')
+ self.indentation_stack.pop()
+ self.produce('EOF', '')
+
+ def next(self):
+ try:
+ sy, systring = self.read()
+ except UnrecognizedInput:
+ self.error("Unrecognized character")
+ if sy == 'IDENT' and systring in self.resword_dict:
+ sy = systring
+ self.sy = sy
+ self.systring = systring
+ if debug_scanner:
+ _, line, col = self.position()
+ if not self.systring or self.sy == self.systring:
+ t = self.sy
+ else:
+ t = "%s %s" % (self.sy, self.systring)
+ print "--- %3d %2d %s" % (line, col, t)
+
+ def put_back(self, sy, systring):
+ self.unread(self.sy, self.systring)
+ self.sy = sy
+ self.systring = systring
+
+ def unread(self, token, value):
+ # This method should be added to Plex
+ self.queue.insert(0, (token, value))
+
+ def add_type_name(self, name):
+ self.type_names[name] = 1
+
+ def looking_at_type_name(self):
+ return self.sy == 'IDENT' and self.systring in self.type_names
+
+ def error(self, message, pos = None):
+ if pos is None:
+ pos = self.position()
+ if self.sy == 'INDENT':
+ error(pos, "Possible inconsistent indentation")
+ raise error(pos, message)
+
+ def expect(self, what, message = None):
+ if self.sy == what:
+ self.next()
+ else:
+ self.expected(what, message)
+
+ def expect_keyword(self, what, message = None):
+ if self.sy == 'IDENT' and self.systring == what:
+ self.next()
+ else:
+ self.expected(what, message)
+
+ def expected(self, what, message):
+ if message:
+ self.error(message)
+ else:
+ self.error("Expected '%s'" % what)
+
+ def expect_indent(self):
+ self.expect('INDENT',
+ "Expected an increase in indentation level")
+
+ def expect_dedent(self):
+ self.expect('DEDENT',
+ "Expected a decrease in indentation level")
+
+ def expect_newline(self, message = "Expected a newline"):
+ # Expect either a newline or end of file
+ if self.sy <> 'EOF':
+ self.expect('NEWLINE', message)
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Symtab.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Symtab.py
new file mode 100644
index 00000000..b56b8531
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Symtab.py
@@ -0,0 +1,1342 @@
+#
+# Pyrex - Symbol Table
+#
+
+from Errors import warning, error, InternalError
+import Options
+import Naming
+import PyrexTypes
+from PyrexTypes import \
+ py_object_type, py_type_type, \
+ c_int_type, c_char_array_type, \
+ CEnumType, CStructOrUnionType, PyExtensionType
+from TypeSlots import \
+ pyfunction_signature, pymethod_signature, \
+ get_special_method_signature, get_property_accessor_signature
+
+class Entry:
+ # A symbol table entry in a Scope or ModuleNamespace.
+ #
+ # name string Python name of entity
+ # cname string C name of entity
+ # type PyrexType Type of entity
+ # ctype PyrexType Declared C type, if different from Pyrex type
+ # doc string Doc string
+ # init string Initial value
+ # visibility 'private' or 'public' or 'extern'
+ # is_builtin boolean Is an entry in the Python builtins dict
+ # is_cglobal boolean Is a C global variable
+ # is_pyglobal boolean Is a Python module-level variable or
+ # class attribute during class construction
+ # is_variable boolean Is a variable
+ # is_cfunction boolean Is a C function
+ # is_cmethod boolean Is a C method of an extension type
+ # is_builtin_method boolean Is a method corresponding to a Python/C API func
+ # is_type boolean Is a type definition
+ # is_const boolean Is a constant
+ # is_property boolean Is a property of an extension type:
+ # #doc_cname string or None C const holding the docstring
+ # getter_cname string C func for getting property
+ # setter_cname string C func for setting or deleting property
+ # is_self_arg boolean Is the "self" arg of an exttype method
+ # is_readonly boolean Can't be assigned to
+ # func_cname string C func implementing Python func
+ # pos position Source position where declared
+ # namespace_cname string If is_pyglobal, the C variable
+ # holding its home namespace
+ # pymethdef_cname string PyMethodDef structure
+ # signature Signature Arg & return types for Python func
+ # init_to_none boolean True if initial value should be None
+ # as_variable Entry Alternative interpretation of extension
+ # type name or builtin C function as a variable
+ # xdecref_cleanup boolean Use Py_XDECREF for error cleanup
+ # in_cinclude boolean Suppress C declaration code
+ # enum_values [Entry] For enum types, list of values
+ # qualified_name string "modname.funcname" or "modname.classname"
+ # or "modname.classname.funcname"
+ # is_declared_generic boolean Is declared as PyObject * even though its
+ # type is an extension type
+ # as_module None Module scope, if a cimported module
+ # is_inherited boolean Is an inherited attribute of an extension type
+ # #interned_cname string C name of interned name string
+ # pystring_cname string C name of Python version of string literal
+ # #is_interned boolean For string const entries, value is interned
+ # used boolean
+ # is_special boolean Is a special method or property accessor
+ # of an extension type
+ # defined_in_pxd boolean Is defined in a .pxd file (not just declared)
+ # api boolean Generate C API for C class or function
+ # utility_code string Utility code needed when this entry is used
+
+ borrowed = 0
+ init = ""
+ visibility = 'private'
+ ctype = None
+ is_builtin = 0
+ is_cglobal = 0
+ is_pyglobal = 0
+ is_variable = 0
+ is_cfunction = 0
+ is_cmethod = 0
+ is_builtin_method = 0
+ is_type = 0
+ is_const = 0
+ is_property = 0
+ doc_cname = None
+ getter_cname = None
+ setter_cname = None
+ is_self_arg = 0
+ is_declared_generic = 0
+ is_readonly = 0
+ func_cname = None
+ doc = None
+ init_to_none = 0
+ as_variable = None
+ xdecref_cleanup = 0
+ in_cinclude = 0
+ as_module = None
+ is_inherited = 0
+ #interned_cname = None
+ pystring_cname = None
+ is_interned = 0
+ used = 0
+ is_special = 0
+ defined_in_pxd = 0
+ api = 0
+ utility_code = None
+
+ def __init__(self, name, cname, type, pos = None, init = None):
+ self.name = name
+ self.cname = cname
+ self.type = type
+ self.pos = pos
+ self.init = init
+
+ def redeclared(self, pos):
+ error(pos, "'%s' does not match previous declaration" % self.name)
+ error(self.pos, "Previous declaration is here")
+
+
+class Scope:
+ # name string Unqualified name
+ # outer_scope Scope or None Enclosing scope
+ # entries {string : Entry} Python name to entry, non-types
+ # const_entries [Entry] Constant entries
+ # type_entries [Entry] Struct/union/enum/typedef/exttype entries
+ # sue_entries [Entry] Struct/union/enum entries
+ # arg_entries [Entry] Function argument entries
+ # var_entries [Entry] User-defined variable entries
+ # pyfunc_entries [Entry] Python function entries
+ # cfunc_entries [Entry] C function entries
+ # c_class_entries [Entry] All extension type entries
+ # temp_entries [Entry] Temporary variable entries
+ # free_temp_entries [Entry] Temp variables currently unused
+ # temp_counter integer Counter for naming temp vars
+ # cname_to_entry {string : Entry} Temp cname to entry mapping
+ # pow_function_used boolean The C pow() function is used
+ # return_type PyrexType or None Return type of function owning scope
+ # is_py_class_scope boolean Is a Python class scope
+ # is_c_class_scope boolean Is an extension type scope
+ # scope_prefix string Disambiguator for C names
+ # in_cinclude boolean Suppress C declaration code
+ # qualified_name string "modname" or "modname.classname"
+ # #pystring_entries [Entry] String const entries newly used as
+ # # Python strings in this scope
+ # nogil boolean In a nogil section
+ # is_cplus boolean Is a C++ struct namespace
+ # reraise_used boolean Reraise statement encountered
+
+ is_py_class_scope = 0
+ is_c_class_scope = 0
+ scope_prefix = ""
+ in_cinclude = 0
+ nogil = 0
+ return_type = None
+ reraise_used = 0
+
+ def __init__(self, name, outer_scope, parent_scope):
+ # The outer_scope is the next scope in the lookup chain.
+ # The parent_scope is used to derive the qualified name of this scope.
+ self.name = name
+ self.outer_scope = outer_scope
+ self.parent_scope = parent_scope
+ mangled_name = "%d%s_" % (len(name), name)
+ qual_scope = self.qualifying_scope()
+ if qual_scope:
+ self.qualified_name = qual_scope.qualify_name(name)
+ self.scope_prefix = qual_scope.scope_prefix + mangled_name
+ else:
+ self.qualified_name = name
+ self.scope_prefix = mangled_name
+ self.entries = {}
+ self.const_entries = []
+ self.type_entries = []
+ self.sue_entries = []
+ self.arg_entries = []
+ self.var_entries = []
+ self.pyfunc_entries = []
+ self.cfunc_entries = []
+ self.c_class_entries = []
+ self.defined_c_classes = []
+ self.imported_c_classes = {}
+ self.temp_entries = []
+ self.free_temp_entries = []
+ self.temp_counter = 1
+ self.cname_to_entry = {}
+ self.pow_function_used = 0
+ #self.pystring_entries = []
+
+ def __str__(self):
+ return "<%s %s>" % (self.__class__.__name__, self.qualified_name)
+
+# def intern(self, name):
+# return self.global_scope().intern(name)
+
+ def qualifying_scope(self):
+ return self.parent_scope
+
+ def mangle(self, prefix, name = None):
+ if name:
+ return "%s%s%s" % (prefix, self.scope_prefix, name)
+ else:
+ return self.parent_scope.mangle(prefix, self.name)
+
+ def mangle_internal(self, name):
+ # Mangle an internal name so as not to clash with any
+ # user-defined name in this scope.
+ prefix = "%s%s_" % (Naming.pyrex_prefix, name)
+ return self.mangle(prefix)
+ #return self.parent_scope.mangle(prefix, self.name)
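+
+ # Example (sketch): for a class "Ham" in a module "spam" the scope
+ # prefix accumulates to "4spam_3Ham_", so mangling a C method name
+ # "eggs" with Naming.func_prefix yields func_prefix + "4spam_3Ham_eggs"
+ # (the actual prefix string is whatever Naming defines).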
+
+ def global_scope(self):
+ # Return the module-level scope containing this scope.
+ return self.outer_scope.global_scope()
+
+ def declare(self, name, cname, type, pos):
+ # Create new entry, and add to dictionary if
+ # name is not None. Reports an error if already
+ # declared.
+ dict = self.entries
+ if name and dict.has_key(name):
+ error(pos, "'%s' already declared" % name)
+ entry = Entry(name, cname, type, pos = pos)
+ entry.in_cinclude = self.in_cinclude
+ if name:
+ entry.qualified_name = self.qualify_name(name)
+ dict[name] = entry
+ return entry
+
+ def qualify_name(self, name):
+ return "%s.%s" % (self.qualified_name, name)
+
+ def declare_const(self, name, type, value, pos, cname = None):
+ # Add an entry for a named constant.
+ if not cname:
+ if self.in_cinclude:
+ cname = name
+ else:
+ cname = self.mangle(Naming.enum_prefix, name)
+ entry = self.declare(name, cname, type, pos)
+ entry.is_const = 1
+ entry.value = value
+ return entry
+
+ def declare_type(self, name, type, pos,
+ cname = None, visibility = 'private', defining = 1):
+ # Add an entry for a type definition.
+ if not cname:
+ cname = name
+ entry = self.declare(name, cname, type, pos)
+ entry.visibility = visibility
+ entry.is_type = 1
+ if defining:
+ self.type_entries.append(entry)
+ return entry
+
+ def declare_typedef(self, name, base_type, pos, cname = None,
+ visibility = 'private'):
+ if not cname:
+ if self.in_cinclude or visibility == 'public':
+ cname = name
+ else:
+ cname = self.mangle(Naming.type_prefix, name)
+ type = PyrexTypes.CTypedefType(cname, base_type)
+ entry = self.declare_type(name, type, pos, cname, visibility)
+ type.qualified_name = entry.qualified_name
+ return entry
+
+ def declare_struct_or_union(self, name, kind, scope,
+ typedef_flag, pos, cname = None, visibility = 'private'):
+ # Add an entry for a struct or union definition.
+ if not cname:
+ if self.in_cinclude or visibility == 'public':
+ cname = name
+ else:
+ cname = self.mangle(Naming.type_prefix, name)
+ entry = self.lookup_here(name)
+ if not entry:
+ type = CStructOrUnionType(name, kind, scope, typedef_flag, cname)
+ entry = self.declare_type(name, type, pos, cname,
+ visibility = visibility, defining = scope is not None)
+ self.sue_entries.append(entry)
+ else:
+ if not (entry.is_type and entry.type.is_struct_or_union
+ and entry.type.kind == kind):
+ entry.redeclared(pos)
+ elif scope and entry.type.scope:
+ error(pos, "'%s' already defined" % name)
+ else:
+ self.check_previous_typedef_flag(entry, typedef_flag, pos)
+ self.check_previous_visibility(entry, visibility, pos)
+ if scope:
+ entry.pos = pos
+ entry.type.set_scope(scope)
+ self.type_entries.append(entry)
+ if not scope and not entry.type.scope:
+ self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
+ return entry
+
+ def check_previous_typedef_flag(self, entry, typedef_flag, pos):
+ if typedef_flag <> entry.type.typedef_flag:
+ error(pos, "'%s' previously declared using '%s'" % (
+ entry.name, ("cdef", "ctypedef")[entry.type.typedef_flag]))
+
+ def check_previous_visibility(self, entry, visibility, pos):
+ if entry.visibility <> visibility:
+ error(pos, "'%s' previously declared as '%s'" % (
+ entry.name, entry.visibility))
+
+ def declare_enum(self, name, pos, cname, typedef_flag,
+ visibility = 'private'):
+ if name:
+ if not cname:
+ if self.in_cinclude or visibility == 'public':
+ cname = name
+ else:
+ cname = self.mangle(Naming.type_prefix, name)
+ type = CEnumType(name, cname, typedef_flag)
+ else:
+ type = PyrexTypes.c_anon_enum_type
+ entry = self.declare_type(name, type, pos, cname = cname,
+ visibility = visibility)
+ entry.enum_values = []
+ self.sue_entries.append(entry)
+ return entry
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', is_cdef = 0):
+ # Add an entry for a variable.
+ if not cname:
+ if visibility <> 'private':
+ cname = name
+ else:
+ cname = self.mangle(Naming.var_prefix, name)
+ entry = self.declare(name, cname, type, pos)
+ entry.is_variable = 1
+ entry.visibility = visibility
+ return entry
+
+ def declare_builtin(self, name, pos):
+ return self.outer_scope.declare_builtin(name, pos)
+
+ def declare_pyfunction(self, name, pos):
+ # Add an entry for a Python function.
+ entry = self.declare_var(name, py_object_type, pos)
+ entry.signature = pyfunction_signature
+ self.pyfunc_entries.append(entry)
+ return entry
+
+ def register_pyfunction(self, entry):
+ self.pyfunc_entries.append(entry)
+
+ def declare_cfunction(self, name, type, pos,
+ cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
+ # Add an entry for a C function.
+ entry = self.lookup_here(name)
+ if entry:
+ if visibility <> 'private' and visibility <> entry.visibility:
+ error(pos, "Function '%s' previously declared as '%s'" % (
+ name, entry.visibility))
+ if not entry.type.same_as(type):
+ error(pos, "Function signature does not match previous declaration")
+ else:
+ if not cname:
+ if api or visibility <> 'private':
+ cname = name
+ else:
+ cname = self.mangle(Naming.func_prefix, name)
+ entry = self.add_cfunction(name, type, pos, cname, visibility)
+ entry.func_cname = cname
+ if in_pxd and visibility <> 'extern':
+ entry.defined_in_pxd = 1
+ if api:
+ entry.api = 1
+ if not defining and not in_pxd and visibility <> 'extern':
+ error(pos, "Non-extern C function declared but not defined")
+ return entry
+
+ def add_cfunction(self, name, type, pos, cname, visibility = 'private'):
+ # Add a C function entry without giving it a func_cname.
+ entry = self.declare(name, cname, type, pos)
+ entry.is_cfunction = 1
+ entry.visibility = visibility
+ self.cfunc_entries.append(entry)
+ return entry
+
+ def attach_var_entry_to_c_class(self, entry):
+ # The name of an extension class has to serve as both a type name and a
+ # variable name holding the type object. It is represented in the symbol
+ # table by a type entry with a variable entry attached to it. For the
+ # variable entry, we use a read-only C global variable whose name is an
+ # expression that refers to the type object.
+ var_entry = Entry(name = entry.name,
+ #type = py_object_type,
+ type = py_type_type,
+ pos = entry.pos,
+ #cname = "((PyObject*)%s)" % entry.type.typeptr_cname
+ cname = entry.type.typeptr_cname)
+ var_entry.is_variable = 1
+ var_entry.is_cglobal = 1
+ var_entry.is_readonly = 1
+ entry.as_variable = var_entry
+
+ def find(self, name, pos):
+ # Look up name, report error if not found.
+ entry = self.lookup(name)
+ if entry:
+ return entry
+ else:
+ error(pos, "'%s' is not declared" % name)
+
+ def find_imported_module(self, path, pos):
+ # Look up qualified name, must be a module, report error if not found.
+ # Path is a list of names.
+ scope = self
+ for name in path:
+ entry = scope.find(name, pos)
+ if not entry:
+ return None
+ if entry.as_module:
+ scope = entry.as_module
+ else:
+ error(pos, "'%s' is not a cimported module" % scope.qualified_name)
+ return None
+ return scope
+
+ def find_qualified_name(self, module_and_name, pos):
+ # Look up qualified name, report error if not found.
+ # module_and_name = [path, name] where path is a list of names.
+ module_path, name = module_and_name
+ scope = self.find_imported_module(module_path, pos)
+ if scope:
+ entry = scope.lookup_here(name)
+ if not entry:
+ mess = "'%s' is not declared" % name
+ if module_path:
+ mess = "%s in module '%s'" % (mess, ".".join(module_path))
+ error(pos, mess)
+ return entry
+
+ def lookup(self, name):
+ # Look up name in this scope or an enclosing one.
+ # Return None if not found.
+ return (self.lookup_here(name)
+ or (self.outer_scope and self.outer_scope.lookup(name))
+ or None)
+
+ def lookup_here(self, name):
+ # Look up in this scope only, return None if not found.
+ return self.entries.get(name, None)
+
+ def lookup_target(self, name):
+ # Look up name in this scope only. Declare as Python
+ # variable if not found.
+ entry = self.lookup_here(name)
+ if not entry:
+ entry = self.declare_var(name, py_object_type, None)
+ return entry
+
+# def add_string_const(self, value):
+# # Add an entry for a string constant.
+# cname = self.new_const_cname()
+# entry = Entry("", cname, c_char_array_type, init = value)
+# entry.used = 1
+# self.const_entries.append(entry)
+# return entry
+
+# def get_string_const(self, value):
+# # Get entry for string constant. Returns an existing
+# # one if possible, otherwise creates a new one.
+# genv = self.global_scope()
+# entry = genv.string_to_entry.get(value)
+# if not entry:
+# entry = self.add_string_const(value)
+# genv.string_to_entry[value] = entry
+# return entry
+
+# def add_py_string(self, entry):
+# # If not already done, allocate a C name for a Python version of
+# # a string literal, and add it to the list of Python strings to
+# # be created at module init time. If the string resembles a
+# # Python identifier, it will be interned.
+# if not entry.pystring_cname:
+# value = entry.init
+# if identifier_pattern.match(value):
+# entry.pystring_cname = self.intern(value)
+# entry.is_interned = 1
+# else:
+# entry.pystring_cname = entry.cname + "p"
+# self.pystring_entries.append(entry)
+# self.global_scope().all_pystring_entries.append(entry)
+
+# def new_const_cname(self):
+# # Create a new globally-unique name for a constant.
+# return self.global_scope().new_const_cname()
+
+ def allocate_temp(self, type):
+ # Allocate a temporary variable of the given type from the
+ # free list if available, otherwise create a new one.
+ # Returns the cname of the variable.
+ for entry in self.free_temp_entries:
+ if entry.type == type:
+ self.free_temp_entries.remove(entry)
+ return entry.cname
+ n = self.temp_counter
+ self.temp_counter = n + 1
+ cname = "%s%d" % (Naming.pyrex_prefix, n)
+ entry = Entry("", cname, type)
+ entry.used = 1
+ if type.is_pyobject:
+ entry.init = "0"
+ self.cname_to_entry[entry.cname] = entry
+ self.temp_entries.append(entry)
+ return entry.cname
+
+ def allocate_temp_pyobject(self):
+ # Allocate a temporary PyObject variable.
+ return self.allocate_temp(py_object_type)
+
+ def release_temp(self, cname):
+ # Release a temporary variable for re-use.
+ if not cname: # can happen when type of an expr is void
+ return
+ entry = self.cname_to_entry[cname]
+ if entry in self.free_temp_entries:
+ raise InternalError("Temporary variable %s released more than once"
+ % cname)
+ self.free_temp_entries.append(entry)
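+
+ # Example (sketch): temps are handed out by cname and recycled by type,
+ # so a released temp of the same type is reused by the next allocation:
+ #
+ #   cname = scope.allocate_temp(py_object_type)    # Naming.pyrex_prefix + "1" for the first temp
+ #   scope.release_temp(cname)
+ #   scope.allocate_temp(py_object_type) == cname   # -> True (reused)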
+
+ def temps_in_use(self):
+ # Return a new list of temp entries currently in use.
+ return [entry for entry in self.temp_entries
+ if entry not in self.free_temp_entries]
+
+# def use_utility_code(self, new_code):
+# self.global_scope().use_utility_code(new_code)
+
+ def generate_library_function_declarations(self, code):
+ # Generate extern decls for C library funcs used.
+ #if self.pow_function_used:
+ # code.putln("%s double pow(double, double);" % Naming.extern_c_macro)
+ pass
+
+ def defines_any(self, names):
+ # Test whether any of the given names are
+ # defined in this scope.
+ for name in names:
+ if name in self.entries:
+ return 1
+ return 0
+
+
+class BuiltinScope(Scope):
+ # The builtin namespace.
+ #
+ # type_names {string : 1} Set of type names (used during parsing)
+
+ def __init__(self):
+ Scope.__init__(self, "__builtin__", None, None)
+ self.type_names = {}
+
+ def declare_builtin(self, name, pos):
+ entry = self.declare(name, name, py_object_type, pos)
+ entry.is_builtin = 1
+ return entry
+
+ def declare_builtin_constant(self, name, type, cname, ctype = None):
+ entry = self.declare(name, cname, type, None)
+ if ctype:
+ entry.ctype = ctype
+ entry.is_variable = 1
+ entry.is_cglobal = 1
+ entry.is_readonly = 1
+ return entry
+
+ def declare_builtin_c_type(self, name, type):
+ entry = self.declare_type(name, type, pos = None)
+ self.type_names[name] = 1
+ return entry
+
+ def declare_builtin_cfunction(self, name, type, cname, python_equiv = None,
+ utility_code = None):
+ # If python_equiv == "*", the Python equivalent has the same name
+ # as the entry, otherwise it has the name specified by python_equiv.
+ entry = self.declare_cfunction(name, type, None, cname)
+ entry.utility_code = utility_code
+ if python_equiv:
+ if python_equiv == "*":
+ python_equiv = name
+ var_entry = Entry(python_equiv, python_equiv, py_object_type)
+ var_entry.is_variable = 1
+ var_entry.is_builtin = 1
+ entry.as_variable = var_entry
+ return entry
+
+ def declare_builtin_class(self, name, objstruct_cname, typeobj_cname):
+ type = PyExtensionType(name, typedef_flag = 1, base_type = None)
+ type.module_name = "__builtin__"
+ type.typeptr_cname = "(&%s)" % typeobj_cname
+ type.objstruct_cname = objstruct_cname
+ type.is_builtin = 1
+ scope = CClassScope(name = name, outer_scope = self, visibility = "extern")
+ type.set_scope(scope)
+ entry = self.declare_type(name, type, pos = None, visibility = "extern",
+ defining = 0)
+ self.attach_var_entry_to_c_class(entry)
+ self.type_names[name] = 1
+ return entry
+
+ def find_type(self, name):
+ # Used internally during initialisation, always succeeds
+ entry = self.lookup_here(name)
+ return entry.type
+
+
+class ModuleScope(Scope):
+ # module_name string Python name of the module
+ # module_cname string C name of Python module object
+ # #module_dict_cname string C name of module dict object
+ # method_table_cname string C name of method table
+ # doc string Module doc string
+ # python_include_files [string] Standard Python headers to be included
+ # include_files [string] Other C headers to be included
+ # context Context
+ # pxd_file_loaded boolean Corresponding .pxd file has been processed
+ # cimported_modules [ModuleScope] Modules imported with cimport
+ # types_imported {PyrexType : 1} Set of types for which import code generated
+ # type_names {string : 1} Set of type names (used during parsing)
+ # pyrex_include_files [string] Pyrex sources included with 'include'
+ # gil_used boolean True if GIL is acquired/released anywhere
+
+ gil_used = 0
+
+ def __init__(self, name, parent_module, context):
+ outer_scope = context.find_submodule("__builtin__")
+ Scope.__init__(self, name, outer_scope, parent_module)
+ self.module_name = name
+ self.context = context
+ self.module_cname = Naming.module_cname
+ self.module_dict_cname = Naming.moddict_cname
+ self.method_table_cname = Naming.methtable_cname
+ self.doc = ""
+ self.python_include_files = ["Python.h", "structmember.h"]
+ self.include_files = []
+ self.type_names = self.outer_scope.type_names.copy()
+ self.pxd_file_loaded = 0
+ self.cimported_modules = []
+ self.types_imported = {}
+ self.pyrex_include_files = []
+
+# def qualifying_scope(self):
+# return self.parent_module
+
+ def global_scope(self):
+ return self
+
+ def declare_builtin(self, name, pos):
+ entry = Scope.declare_builtin(self, name, pos)
+ #entry.interned_cname = self.intern(name)
+ return entry
+
+# def intern(self, name):
+# intern_map = self.intern_map
+# cname = intern_map.get(name)
+# if not cname:
+# cname = Naming.interned_prefix + name
+# intern_map[name] = cname
+# self.interned_names.append(name)
+# return cname
+
+ def add_include_file(self, filename):
+ if filename not in self.python_include_files \
+ and filename not in self.include_files:
+ self.include_files.append(filename)
+
+ def add_imported_module(self, scope):
+ #print "add_imported_module:", scope, "to", self ###
+ if scope not in self.cimported_modules:
+ self.cimported_modules.append(scope)
+
+ def add_imported_entry(self, name, entry, pos):
+ if name not in self.entries:
+ self.entries[name] = entry
+ else:
+ error(pos, "'%s' already declared" % name)
+
+ def declare_module(self, name, scope, pos):
+ # Declare a cimported module. This is represented as a
+ # Python module-level variable entry with a module
+ # scope attached to it. Reports an error and returns
+ # None if previously declared as something else.
+ entry = self.lookup_here(name)
+ if entry:
+ if entry.is_pyglobal and entry.as_module is scope:
+ return entry # Already declared as the same module
+ if not (entry.is_pyglobal and not entry.as_module):
+ #error(pos, "'%s' redeclared" % name)
+ entry.redeclared(pos)
+ return None
+ else:
+ entry = self.declare_var(name, py_object_type, pos)
+ #print "declare_module:", scope, "in", self ###
+ entry.as_module = scope
+ #self.cimported_modules.append(scope)
+ return entry
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', is_cdef = 0):
+ # Add an entry for a global variable. If it is a Python
+ # object type, and not declared with cdef, it will live
+ # in the module dictionary, otherwise it will be a C
+ # global variable.
+ entry = Scope.declare_var(self, name, type, pos,
+ cname, visibility, is_cdef)
+ if not visibility in ('private', 'public', 'extern'):
+ error(pos, "Module-level variable cannot be declared %s" % visibility)
+ if not is_cdef:
+ if not (type.is_pyobject and not type.is_extension_type):
+ raise InternalError(
+ "Non-cdef global variable is not a generic Python object")
+ entry.is_pyglobal = 1
+ entry.namespace_cname = self.module_cname
+ #if Options.intern_names:
+ # entry.interned_cname = self.intern(name)
+ else:
+ entry.is_cglobal = 1
+ self.var_entries.append(entry)
+ return entry
+
+ def declare_global(self, name, pos):
+ entry = self.lookup_here(name)
+ if not entry:
+ self.declare_var(name, py_object_type, pos)
+
+ def add_default_value(self, type):
+ # Add an entry for holding a function argument
+ # default value.
+ cname = "%s%d" % (Naming.default_prefix, self.default_counter)
+ self.default_counter += 1
+ entry = Entry("", cname, type)
+ self.default_entries.append(entry)
+ return entry
+
+# def new_const_cname(self):
+# # Create a new globally-unique name for a constant.
+# n = self.const_counter
+# self.const_counter = n + 1
+# return "%s%d" % (Naming.const_prefix, n)
+
+# def use_utility_code(self, new_code):
+# # Add string to list of utility code to be included,
+# # if not already there (tested using 'is').
+# for old_code in self.utility_code_used:
+# if old_code is new_code:
+# return
+# self.utility_code_used.append(new_code)
+
+ def declare_c_class(self, name, pos, defining = 0, implementing = 0,
+ module_name = None, base_type = None, visibility = 'private',
+ typedef_flag = 0, api = 0, options = None):
+ #
+ # Look for previous declaration as a type
+ #
+ #print "declare_c_class:", name, "in", self ###
+ entry = self.lookup_here(name)
+ if entry:
+ type = entry.type
+ if not (entry.is_type and type.is_extension_type):
+ entry = None # Will cause redeclaration and produce an error
+ else:
+ scope = type.scope
+ defined = scope and scope.defined
+ definitive = defining or (implementing and not defined)
+ self.check_previous_typedef_flag(entry, typedef_flag, pos)
+ if base_type or definitive:
+ if type.base_type and base_type is not type.base_type:
+ error(pos, "Base type does not match previous declaration")
+ type.base_type = base_type
+ #
+ # Make a new entry if needed
+ #
+ if not entry:
+ type = PyExtensionType(name, typedef_flag, base_type)
+ if visibility == 'extern':
+ type.module_name = module_name
+ else:
+ type.module_name = self.qualified_name
+ type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
+ entry = self.declare_type(name, type, pos, visibility = visibility,
+ defining = 0)
+ if options and options.objstruct_cname:
+ type.objstruct_cname = options.objstruct_cname
+ elif not entry.in_cinclude:
+ type.objstruct_cname = self.mangle(Naming.objstruct_prefix, name)
+ else:
+ error(entry.pos,
+ "Object name required for 'public' or 'extern' C class")
+ self.attach_var_entry_to_c_class(entry)
+ self.c_class_entries.append(entry)
+ #
+ # Check for re-definition and create scope if needed
+ #
+ scope = type.scope
+ if not scope:
+ if defining or implementing:
+ scope = CClassScope(name = name, outer_scope = self,
+ visibility = visibility, no_gc = options.no_gc)
+ if base_type:
+ scope.declare_inherited_c_attributes(base_type.scope)
+ type.set_scope(scope)
+ self.type_entries.append(entry)
+ else:
+ self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
+ else:
+ if defining and scope.defined:
+ error(pos, "C class '%s' already defined" % name)
+ elif implementing and scope.implemented:
+ error(pos, "C class '%s' already implemented" % name)
+ scope.outer_scope = self
+ #
+ # Fill in options, checking for compatibility with any previous declaration
+ #
+ if defining:
+ entry.defined_in_pxd = 1
+ if implementing: # So that filenames in runtime exceptions refer to
+ entry.pos = pos # the .pyx file and not the .pxd file
+ if visibility <> 'private' and entry.visibility <> visibility:
+ error(pos, "Class '%s' previously declared as '%s'"
+ % (name, entry.visibility))
+ if api:
+ entry.api = 1
+ if options:
+ if options.objstruct_cname:
+ if type.objstruct_cname and type.objstruct_cname <> options.objstruct_cname:
+ error(pos, "Object struct name differs from previous declaration")
+ type.objstruct_cname = options.objstruct_cname
+ if options.typeobj_cname:
+ if type.typeobj_cname and type.typeobj_cname <> options.typeobj_cname:
+ error(pos, "Type object name differs from previous declaration")
+ type.typeobj_cname = options.typeobj_cname
+ #
+ # Return new or existing entry
+ #
+ return entry
+
+ def check_for_illegal_incomplete_ctypedef(self, typedef_flag, pos):
+ if typedef_flag and not self.in_cinclude:
+ error(pos, "Forward-referenced type must use 'cdef', not 'ctypedef'")
+
+ def allocate_vtable_names(self, entry):
+ # If extension type has a vtable, allocate vtable struct and
+ # slot names for it.
+ type = entry.type
+ if type.base_type and type.base_type.vtabslot_cname:
+ #print "...allocating vtabslot_cname because base type has one" ###
+ type.vtabslot_cname = "%s.%s" % (
+ Naming.obj_base_cname, type.base_type.vtabslot_cname)
+ elif type.scope and type.scope.cfunc_entries:
+ #print "...allocating vtabslot_cname because there are C methods" ###
+ type.vtabslot_cname = Naming.vtabslot_cname
+ if type.vtabslot_cname:
+ #print "...allocating other vtable related cnames" ###
+ type.vtabstruct_cname = self.mangle(Naming.vtabstruct_prefix, entry.name)
+ type.vtabptr_cname = self.mangle(Naming.vtabptr_prefix, entry.name)
+
+ def check_c_classes(self):
+ # Performs post-analysis checking and finishing up of extension types
+ # being implemented in this module. This is called only for the main
+ # .pyx file scope and its associated .pxd scope, not for cimported .pxd
+ # scopes.
+ #
+ # Checks all extension types declared in this scope to
+ # make sure that:
+ #
+ # * The extension type is implemented
+ # * All required object and type names have been specified or generated
+ # * All non-inherited C methods are implemented
+ #
+ # Also allocates a name for the vtable if needed.
+ #
+ debug_check_c_classes = 0
+ if debug_check_c_classes:
+ print "Scope.check_c_classes: checking scope", self.qualified_name
+ for entry in self.c_class_entries:
+ if debug_check_c_classes:
+ print "...entry", entry.name, entry
+ print "......type =", entry.type
+ print "......visibility =", entry.visibility
+ type = entry.type
+ name = entry.name
+ visibility = entry.visibility
+ # Check defined
+ if not type.scope:
+ error(entry.pos, "C class '%s' is declared but not defined" % name)
+ # Generate typeobj_cname
+ if visibility <> 'extern' and not type.typeobj_cname:
+ type.typeobj_cname = self.mangle(Naming.typeobj_prefix, name)
+ ## Generate typeptr_cname
+ #type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
+ # Check C methods defined
+ if type.scope:
+ for method_entry in type.scope.cfunc_entries:
+ if not method_entry.is_inherited and not method_entry.func_cname:
+ error(method_entry.pos, "C method '%s' is declared but not defined" %
+ method_entry.name)
+ # Allocate vtable name if necessary
+ if type.vtabslot_cname:
+ #print "ModuleScope.check_c_classes: allocating vtable cname for", self ###
+ type.vtable_cname = self.mangle(Naming.vtable_prefix, entry.name)
+
+
+class DefinitionScope(ModuleScope):
+ # Scope for the definition part of a module (.pxd).
+ #
+ # parent_module Scope Parent in the import namespace
+ # module_entries {string : Entry} For cimport statements
+
+ def __init__(self, name, parent_module, context):
+ ModuleScope.__init__(self, name, parent_module, context)
+ self.parent_module = parent_module
+ self.module_entries = {}
+
+ def find_module(self, module_name, pos):
+ # Find a module in the import namespace, interpreting
+ # relative imports relative to this module's parent.
+ # Finds and parses the module's .pxd file if the module
+ # has not been referenced before.
+ return self.global_scope().context.find_module(
+ module_name, relative_to = self.parent_module, pos = pos)
+
+ def find_submodule(self, name):
+ # Find and return the definition scope for a submodule of this module,
+ # creating a new empty one if necessary. Doesn't parse .pxd.
+ scope = self.lookup_submodule(name)
+ if not scope:
+ scope = DefinitionScope(name,
+ parent_module = self, context = self.context)
+ self.module_entries[name] = scope
+ return scope
+
+ def lookup_submodule(self, name):
+ # Return scope for submodule of this module, or None.
+ return self.module_entries.get(name, None)
+
+
+class ImplementationScope(ModuleScope):
+ # This scope is used to keep the names declared only in the implementation
+ # part of a module from being seen by other modules that cimport this
+ # module. Also holds information that is only relevant for the
+ # implementation part. When declaring or looking up a name, this scope
+ # behaves as though it and its corresponding definition_scope were a single
+ # scope.
+ #
+ # definition_scope ModuleScope Scope holding definitions from corresponding .pxd
+ # doc_cname string C name of module doc string
+ # default_counter integer Counter for naming default values
+ # #const_counter integer Counter for naming constants
+ # #utility_code_used [string] Utility code to be included
+ # default_entries [Entry] Function argument default entries
+ # #string_to_entry {string : Entry} Map string const to entry
+ # #intern_map {string : string} Mapping from Python names to interned strs
+ # #interned_names [string] Interned names pending generation of declarations
+ # #all_pystring_entries [Entry] Python string consts from all scopes
+
+ def __init__(self, def_scope):
+ ModuleScope.__init__(self, def_scope.name, def_scope.parent_scope,
+ def_scope.context)
+ self.definition_scope = def_scope
+ self.doc_cname = Naming.moddoc_cname
+ self.type_names = def_scope.type_names.copy()
+ self.default_counter = 1
+ #self.const_counter = 1
+ #self.utility_code_used = []
+ self.default_entries = []
+ #self.string_to_entry = {}
+ #self.intern_map = {}
+ #self.interned_names = []
+ #self.all_pystring_entries = []
+
+ def lookup_here(self, name):
+ entry = Scope.lookup_here(self, name)
+ if not entry:
+ entry = self.definition_scope.lookup_here(name)
+ return entry
+
+ def find_module(self, module_name, pos):
+ return self.definition_scope.find_module(module_name, pos)
+
+ def check_c_classes(self):
+ self.definition_scope.check_c_classes()
+ ModuleScope.check_c_classes(self)
+
+
+class LocalScope(Scope):
+
+ def __init__(self, name, outer_scope):
+ Scope.__init__(self, name, outer_scope, outer_scope)
+
+ def mangle(self, prefix, name):
+ return prefix + name
+
+ def declare_arg(self, name, type, pos, readonly = 0):
+ # Add an entry for an argument of a function.
+ #print "LocalScope.declare_arg:", name, "readonly =", readonly ###
+ cname = self.mangle(Naming.var_prefix, name)
+ entry = self.declare(name, cname, type, pos)
+ entry.is_variable = 1
+ entry.is_readonly = readonly
+ if type.is_pyobject:
+ entry.init = "0"
+ #entry.borrowed = 1 # Not using borrowed arg refs for now
+ self.arg_entries.append(entry)
+ return entry
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', is_cdef = 0):
+ # Add an entry for a local variable.
+ if visibility in ('public', 'readonly'):
+ error(pos, "Local variable cannot be declared %s" % visibility)
+ entry = Scope.declare_var(self, name, type, pos,
+ cname, visibility, is_cdef)
+ entry.init_to_none = type.is_pyobject
+ self.var_entries.append(entry)
+ return entry
+
+ def declare_global(self, name, pos):
+ # Pull entry from global scope into local scope.
+ if self.lookup_here(name):
+ error(pos, "'%s' already declared" % name)
+ else:
+ entry = self.global_scope().lookup_target(name)
+ self.entries[name] = entry
+
+
+class StructOrUnionScope(Scope):
+ # Namespace of a C struct or union.
+ #
+ # cplus_constructors [CFuncType] C++ constructor signatures
+
+ def __init__(self, is_cplus = False, base_scopes = []):
+ Scope.__init__(self, "?", None, None)
+ self.base_scopes = base_scopes
+ self.is_cplus = is_cplus
+ if is_cplus:
+ constructors = []
+ for base in base_scopes:
+ constructors.extend(base.cplus_constructors)
+ self.cplus_constructors = constructors
+
+ def lookup_here(self, name):
+ entry = Scope.lookup_here(self, name)
+ if not entry:
+ for base in self.base_scopes:
+ entry = base.lookup_here(name)
+ if entry:
+ break
+ return entry
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', **kwds):
+ # Add an entry for an attribute.
+ if not cname:
+ cname = name
+ entry = self.declare(name, cname, type, pos)
+ entry.is_variable = 1
+ self.var_entries.append(entry)
+ if type.is_pyobject:
+ error(pos,
+ "C struct/union member cannot be a Python object")
+ if visibility <> 'private':
+ error(pos,
+ "C struct/union member cannot be declared %s" % visibility)
+ return entry
+
+ def declare_cfunction(self, name, type, pos, **kwds):
+ #print "StructOrUnionScope.declare_cfunction:", name ###
+ if not self.is_cplus:
+ error(pos, "C struct/union member cannot be a function")
+ # Define it anyway to suppress further errors
+ elif name == "__init__":
+ type.pos = pos
+ self.cplus_constructors.append(type)
+ return
+ #kwds['defining'] = 1
+ #Scope.declare_cfunction(self, name, type, pos, *args, **kwds)
+ self.declare_var(name, type, pos, **kwds)
+
+
+class ClassScope(Scope):
+ # Abstract base class for namespace of
+ # Python class or extension type.
+ #
+ # class_name string Pyrex name of the class
+ # scope_prefix string Additional prefix for names
+ # declared in the class
+ # doc string or None Doc string
+
+ def __init__(self, name, outer_scope):
+ Scope.__init__(self, name, outer_scope, outer_scope)
+ self.class_name = name
+ self.doc = None
+
+ def add_string_const(self, value):
+ return self.outer_scope.add_string_const(value)
+
+
+class PyClassScope(ClassScope):
+ # Namespace of a Python class.
+ #
+ # class_dict_cname string C variable holding class dict
+ # class_obj_cname string C variable holding class object
+
+ is_py_class_scope = 1
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', is_cdef = 0):
+ # Add an entry for a class attribute.
+ entry = Scope.declare_var(self, name, type, pos,
+ cname, visibility, is_cdef)
+ entry.is_pyglobal = 1
+ entry.namespace_cname = self.class_obj_cname
+ #if Options.intern_names:
+ # entry.interned_cname = self.intern(name)
+ return entry
+
+ def allocate_temp(self, type):
+ return self.outer_scope.allocate_temp(type)
+
+ def release_temp(self, cname):
+ self.outer_scope.release_temp(cname)
+
+ #def recycle_pending_temps(self):
+ # self.outer_scope.recycle_pending_temps()
+
+ def add_default_value(self, type):
+ return self.outer_scope.add_default_value(type)
+
+
+class CClassScope(ClassScope):
+ # Namespace of an extension type.
+ #
+ # parent_type CClassType
+ # #typeobj_cname string or None
+ # #objstruct_cname string
+ # method_table_cname string
+ # member_table_cname string
+ # getset_table_cname string
+ # has_pyobject_attrs boolean Any PyObject attributes?
+ # pyattr_entries [Entry]
+ # public_attr_entries [Entry] Public or readonly attribute entries
+ # property_entries [Entry]
+ # defined boolean Defined in .pxd file
+ # implemented boolean Defined in .pyx file
+ # inherited_var_entries [Entry] Adapted var entries from base class
+ # no_gc boolean No GC even if there are Python attributes
+
+ is_c_class_scope = 1
+
+ def __init__(self, name, outer_scope, visibility, no_gc = 0):
+ ClassScope.__init__(self, name, outer_scope)
+ if visibility <> 'extern':
+ self.method_table_cname = outer_scope.mangle(Naming.methtab_prefix, name)
+ self.member_table_cname = outer_scope.mangle(Naming.memtab_prefix, name)
+ self.getset_table_cname = outer_scope.mangle(Naming.gstab_prefix, name)
+ self.has_pyobject_attrs = 0
+ self.pyattr_entries = []
+ self.public_attr_entries = []
+ self.property_entries = []
+ self.inherited_var_entries = []
+ self.defined = 0
+ self.implemented = 0
+ self.no_gc = no_gc
+
+ def needs_gc(self):
+ # If the type or any of its base types have Python-valued
+ # C attributes, then it needs to participate in GC.
+ return self.has_pyobject_attrs or \
+ (self.parent_type.base_type and \
+ self.parent_type.base_type.scope.needs_gc())
+
+ def declare_builtin_var(self, name, type, cname):
+ entry = self.declare(name, cname or name, type, None)
+ entry.is_variable = 1
+ return entry
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', is_cdef = 0):
+ # Add an entry for an attribute.
+ if self.defined:
+ error(pos,
+ "C attributes cannot be added in implementation part of"
+ " extension type")
+ if get_special_method_signature(name):
+ error(pos,
+ "The name '%s' is reserved for a special method."
+ % name)
+ if not cname:
+ cname = name
+ entry = self.declare(name, cname, type, pos)
+ entry.visibility = visibility
+ entry.is_variable = 1
+ self.var_entries.append(entry)
+ if type.is_pyobject and name <> "__weakref__":
+ self.has_pyobject_attrs = 1
+ self.pyattr_entries.append(entry)
+ if visibility not in ('private', 'public', 'readonly'):
+ error(pos,
+ "Attribute of extension type cannot be declared %s" % visibility)
+ if visibility in ('public', 'readonly'):
+ if type.pymemberdef_typecode:
+ self.public_attr_entries.append(entry)
+ if name == "__weakref__":
+ error(pos, "Special attribute __weakref__ cannot be exposed to Python")
+ else:
+ error(pos,
+ "C attribute of type '%s' cannot be accessed from Python" % type)
+ if visibility == 'public' and type.is_extension_type:
+ error(pos,
+ "Non-generic Python attribute cannot be exposed for writing from Python")
+ return entry
+
+ def declare_pyfunction(self, name, pos):
+ # Add an entry for a method.
+ if name == "__new__":
+ error(pos, "__new__ method of extension type will change semantics "
+ "in a future version of Pyrex. Use __cinit__ instead.")
+ name = "__cinit__"
+ entry = self.lookup_here(name)
+ if entry and entry.is_builtin_method:
+ self.overriding_builtin_method(name, pos)
+ else:
+ entry = self.declare(name, name, py_object_type, pos)
+ special_sig = get_special_method_signature(name)
+ if special_sig:
+ entry.is_special = 1
+ entry.signature = special_sig
+ # Special methods don't get put in the method table
+ else:
+ entry.signature = pymethod_signature
+ self.pyfunc_entries.append(entry)
+ return entry
+
+ def overriding_builtin_method(self, name, pos):
+ error(pos, "Cannot override builtin method '%s' of class '%s'" % (
+ name, self.parent_type.base_type.name))
+
+ def lookup_here(self, name):
+ if name == "__new__":
+ name = "__cinit__"
+ return ClassScope.lookup_here(self, name)
+
+ def declare_builtin_method(self, name, type, cname):
+ entry = ClassScope.add_cfunction(self, name, type, None, cname)
+ entry.is_builtin_method = 1
+ return entry
+
+ def declare_cfunction(self, name, type, pos,
+ cname = None, visibility = 'private', defining = 0, api = 0, in_pxd = 0):
+ if get_special_method_signature(name):
+ error(pos, "Special methods must be declared with 'def', not 'cdef'")
+ args = type.args
+ if not args:
+ error(pos, "C method has no self argument")
+ elif not args[0].type.same_as(self.parent_type):
+ error(pos, "Self argument of C method does not match parent type")
+ entry = self.lookup_here(name)
+ if entry:
+ if not entry.is_cfunction:
+ entry.redeclared(pos)
+ elif entry.is_builtin_method:
+ self.overriding_builtin_method(name, pos)
+ else:
+ if defining and entry.func_cname:
+ error(pos, "'%s' already defined" % name)
+ if not entry.type.same_as(type, as_cmethod = 1):
+ error(pos, "Signature does not match previous declaration")
+ error(entry.pos, "Previous declaration is here")
+ else:
+ if self.defined:
+ error(pos,
+ "C method '%s' not previously declared in definition part of"
+ " extension type" % name)
+ entry = self.add_cfunction(name, type, pos, cname or name, visibility)
+ if defining:
+ entry.func_cname = self.mangle(Naming.func_prefix, name)
+ return entry
+
+ def add_cfunction(self, name, type, pos, cname, visibility):
+ # Add a cfunction entry without giving it a func_cname.
+ entry = ClassScope.add_cfunction(self, name, type, pos, cname, visibility)
+ entry.is_cmethod = 1
+ return entry
+
+ def declare_property(self, name, doc, pos):
+ entry = self.declare(name, name, py_object_type, pos)
+ entry.is_property = 1
+ entry.doc = doc
+ entry.scope = PropertyScope(name,
+ outer_scope = self.global_scope(), parent_scope = self)
+ entry.scope.parent_type = self.parent_type
+ self.property_entries.append(entry)
+ return entry
+
+ def declare_inherited_c_attributes(self, base_scope):
+ # Declare entries for all the C attributes of an
+ # inherited type, with cnames modified appropriately
+ # to work with this type.
+ def adapt(cname):
+ return "%s.%s" % (Naming.obj_base_cname, base_entry.cname)
+ for base_entry in \
+ base_scope.inherited_var_entries + base_scope.var_entries:
+ entry = self.declare(base_entry.name, adapt(base_entry.cname),
+ base_entry.type, None)
+ entry.is_variable = 1
+ self.inherited_var_entries.append(entry)
+ for base_entry in base_scope.cfunc_entries:
+ cname = base_entry.cname
+ if base_entry.is_builtin_method:
+ self.entries[base_entry.name] = base_entry
+ else:
+ entry = self.add_cfunction(base_entry.name, base_entry.type,
+ base_entry.pos, adapt(base_entry.cname), base_entry.visibility)
+ entry.is_inherited = 1
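+
+ # Example (sketch): if the base type's scope holds an attribute entry
+ # with cname "value", the inherited entry created here gets the cname
+ # "%s.value" % Naming.obj_base_cname, i.e. the field is reached through
+ # the embedded base-class struct rather than being redeclared.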
+
+
+class PropertyScope(Scope):
+ # Scope holding the __get__, __set__ and __del__ methods for
+ # a property of an extension type.
+ #
+ # parent_type PyExtensionType The type to which the property belongs
+
+ def declare_pyfunction(self, name, pos):
+ # Add an entry for a method.
+ entry = self.declare(name, name, py_object_type, pos)
+ signature = get_property_accessor_signature(name)
+ if signature:
+ entry.is_special = 1
+ entry.signature = signature
+ else:
+ error(pos, "Only __get__, __set__ and __del__ methods allowed "
+ "in a property declaration")
+ entry.signature = pymethod_signature
+ return entry
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/TypeSlots.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/TypeSlots.py
new file mode 100644
index 00000000..9cb858e3
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/TypeSlots.py
@@ -0,0 +1,629 @@
+#
+# Pyrex - Tables describing slots in the type object
+# and associated know-how.
+#
+
+import Naming
+import PyrexTypes
+
+class Signature:
+ # Method slot signature descriptor.
+ #
+ # has_dummy_arg boolean
+ # has_generic_args boolean
+ # fixed_arg_format string
+ # ret_format string
+ # error_value string
+ #
+ # The formats are strings made up of the following
+ # characters:
+ #
+ # 'O' Python object
+ # 'T' Python object of the type of 'self'
+ # 't' Python type object
+ # 'v' void
+ # 'p' void *
+ # 'P' void **
+ # 'i' int
+ # 'I' int *
+ # 'l' long
+ # 'Z' Py_ssize_t
+ # 's' char *
+ # 'S' char **
+ # 'r' int used only to signal exception
+ # '-' dummy 'self' argument (not used)
+ # '*' rest of args passed as generic Python
+ # arg tuple and kw dict (must be last
+ # char in format string)
+
+ format_map = {
+ 'O': PyrexTypes.py_object_type,
+ 't': PyrexTypes.py_type_type,
+ 'v': PyrexTypes.c_void_type,
+ 'p': PyrexTypes.c_void_ptr_type,
+ 'P': PyrexTypes.c_void_ptr_ptr_type,
+ 'b': PyrexTypes.c_int_type, # boolean - no error value
+ 'i': PyrexTypes.c_int_type,
+ 'I': PyrexTypes.c_int_ptr_type,
+ 'l': PyrexTypes.c_long_type,
+ 'Z': PyrexTypes.c_py_ssize_t_type,
+ 's': PyrexTypes.c_char_ptr_type,
+ 'S': PyrexTypes.c_char_ptr_ptr_type,
+ 'r': PyrexTypes.c_returncode_type,
+ # 'T', '-' and '*' are handled otherwise
+ # and are not looked up in here
+ }
+
+ error_value_map = {
+ 'O': "0",
+ 't': "0",
+ 'i': "-1",
+ 'l': "-1",
+ 'r': "-1",
+ 'Z': "-1",
+ }
+
+ def __init__(self, arg_format, ret_format):
+ self.has_dummy_arg = 0
+ self.has_generic_args = 0
+ if arg_format[:1] == '-':
+ self.has_dummy_arg = 1
+ arg_format = arg_format[1:]
+ if arg_format[-1:] == '*':
+ self.has_generic_args = 1
+ arg_format = arg_format[:-1]
+ self.fixed_arg_format = arg_format
+ self.ret_format = ret_format
+ self.error_value = self.error_value_map.get(ret_format, None)
+
+ def num_fixed_args(self):
+ return len(self.fixed_arg_format)
+
+ def is_self_arg(self, i):
+ return self.fixed_arg_format[i] == 'T'
+
+ def fixed_arg_type(self, i):
+ return self.format_map[self.fixed_arg_format[i]]
+
+ def return_type(self):
+ return self.format_map[self.ret_format]
+
+ def exception_value(self):
+ return self.error_value_map.get(self.ret_format)
+
+ def function_type(self, self_type = None):
+ # Construct a C function type descriptor for this signature
+ args = []
+ #for i in xrange(self.num_fixed_args()):
+ # arg_type = self.fixed_arg_type(i)
+ for c in self.fixed_arg_format:
+ if c == "T":
+ assert self_type is not None
+ arg_type = self_type
+ else:
+ arg_type = self.format_map[c]
+ args.append(PyrexTypes.CFuncTypeArg("", arg_type, None))
+ ret_type = self.return_type()
+ exc_value = self.exception_value()
+ return PyrexTypes.CFuncType(ret_type, args, exception_value = exc_value)
+
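+# Illustrative example (editor's sketch, not part of the original source):
+# how a Signature instance is interpreted, using only names defined above.
+# 'ext_type' stands for some extension type's PyrexType and is hypothetical.
+#
+#     sig = Signature("TO", "O")    # e.g. a binary method: self plus one object
+#     sig.num_fixed_args()          # -> 2
+#     sig.is_self_arg(0)            # -> True  ('T' marks the self argument)
+#     sig.fixed_arg_type(1)         # -> PyrexTypes.py_object_type
+#     sig.exception_value()         # -> "0"   (a NULL return signals an error)
+#     sig.function_type(ext_type)   # -> CFuncType of the corresponding C slot function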
+
+class SlotDescriptor:
+ # Abstract base class for type slot descriptors.
+ #
+ # slot_name string Member name of the slot in the type object
+    #  is_initialised_dynamically  boolean  Is initialised by code in the module init function
+ # flag Py_TPFLAGS_XXX value indicating presence of slot
+
+ def __init__(self, slot_name, dynamic = 0, flag = None):
+ self.slot_name = slot_name
+ self.is_initialised_dynamically = dynamic
+ self.flag = flag
+
+ def generate(self, scope, code):
+ if self.is_initialised_dynamically:
+ value = 0
+ else:
+ value = self.slot_code(scope)
+ flag = self.flag
+ if flag:
+ code.putln("#if Py_TPFLAGS_DEFAULT & %s" % flag)
+ code.putln("%s, /*%s*/" % (value, self.slot_name))
+ if flag:
+ code.putln("#endif")
+
+ # Some C implementations have trouble statically
+ # initialising a global with a pointer to an extern
+ # function, so we initialise some of the type slots
+ # in the module init function instead.
+
+ def generate_dynamic_init_code(self, scope, code):
+ if self.is_initialised_dynamically:
+ value = self.slot_code(scope)
+            if value != "0":
+ code.putln("%s.%s = %s;" % (
+ scope.parent_type.typeobj_cname,
+ self.slot_name,
+ value
+ )
+ )
+
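+# Editor's note (illustrative, not part of the original source): for a
+# statically initialised slot, SlotDescriptor.generate() emits one struct
+# field initialiser, e.g. for nb_index (guarded by its flag):
+#
+#     #if Py_TPFLAGS_DEFAULT & Py_TPFLAGS_HAVE_INDEX
+#     <C name of the __index__ entry>, /*nb_index*/
+#     #endif
+#
+# For a dynamically initialised slot, a literal 0 is emitted instead and
+# generate_dynamic_init_code() assigns the real value from the module
+# init function.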
+
+class FixedSlot(SlotDescriptor):
+ # Descriptor for a type slot with a fixed value.
+ #
+ # value string
+
+ def __init__(self, slot_name, value):
+ SlotDescriptor.__init__(self, slot_name)
+ self.value = value
+
+ def slot_code(self, scope):
+ return self.value
+
+
+class EmptySlot(FixedSlot):
+ # Descriptor for a type slot whose value is always 0.
+
+ def __init__(self, slot_name):
+ FixedSlot.__init__(self, slot_name, "0")
+
+
+class GCDependentSlot(SlotDescriptor):
+ # Descriptor for a slot whose value depends on whether
+ # the type participates in GC.
+
+ def __init__(self, slot_name, no_gc_value, gc_value, dynamic = 0):
+ SlotDescriptor.__init__(self, slot_name, dynamic)
+ self.no_gc_value = no_gc_value
+ self.gc_value = gc_value
+
+ def slot_code(self, scope):
+ if scope.has_pyobject_attrs:
+ return self.gc_value
+ else:
+ return self.no_gc_value
+
+
+class MethodSlot(SlotDescriptor):
+ # Type slot descriptor for a user-definable method.
+ #
+ # signature Signature
+ # method_name string The __xxx__ name of the method
+ # default string or None Default value of the slot
+
+ def __init__(self, signature, slot_name, method_name, default = None, flag = None):
+ SlotDescriptor.__init__(self, slot_name, flag = flag)
+ self.signature = signature
+ self.slot_name = slot_name
+ self.method_name = method_name
+ self.default = default
+ method_name_to_slot[method_name] = self
+
+ def slot_code(self, scope):
+ entry = scope.lookup_here(self.method_name)
+ if entry:
+ return entry.func_cname
+ else:
+ return "0"
+
+
+class InternalMethodSlot(SlotDescriptor):
+ # Type slot descriptor for a method which is always
+ # synthesized by Pyrex.
+ #
+ # slot_name string Member name of the slot in the type object
+
+ def __init__(self, slot_name):
+ SlotDescriptor.__init__(self, slot_name)
+
+ def slot_code(self, scope):
+ return scope.mangle_internal(self.slot_name)
+
+
+class PyAttrDependentSlot(InternalMethodSlot):
+ # Type slot for a method that is synthesized only
+ # when the extension type has Python-valued attributes.
+
+ def slot_code(self, scope):
+ if scope.pyattr_entries:
+ return InternalMethodSlot.slot_code(self, scope)
+ else:
+ return "0"
+
+
+class SyntheticSlot(InternalMethodSlot):
+ # Type slot descriptor for a synthesized method which
+ # dispatches to one or more user-defined methods depending
+ # on its arguments. If none of the relevant methods are
+ # defined, the method will not be synthesized and an
+ # alternative default value will be placed in the type
+ # slot.
+
+ def __init__(self, slot_name, user_methods, default_value):
+ InternalMethodSlot.__init__(self, slot_name)
+ self.user_methods = user_methods
+ self.default_value = default_value
+
+ def slot_code(self, scope):
+ if scope.defines_any(self.user_methods):
+ return InternalMethodSlot.slot_code(self, scope)
+ else:
+ return self.default_value
+
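+# Editor's note (illustrative): e.g. SyntheticSlot("tp_getattro", ["__getattr__"], "0")
+# in the main slot table below fills tp_getattro with the synthesized getattro
+# wrapper only when the class defines __getattr__, and with 0 otherwise.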
+
+class TypeFlagsSlot(SlotDescriptor):
+ # Descriptor for the type flags slot.
+
+ def slot_code(self, scope):
+ value = "Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_BASETYPE"
+ if scope.pyattr_entries and not scope.no_gc:
+ value += "|Py_TPFLAGS_HAVE_GC"
+ return value
+
+
+class DocStringSlot(SlotDescriptor):
+ # Descriptor for the docstring slot.
+
+ def slot_code(self, scope):
+ if scope.doc is not None:
+ return '"%s"' % scope.doc
+ else:
+ return "0"
+
+
+class SuiteSlot(SlotDescriptor):
+ # Descriptor for a substructure of the type object.
+ #
+ # sub_slots [SlotDescriptor]
+
+ def __init__(self, sub_slots, slot_type, slot_name):
+ SlotDescriptor.__init__(self, slot_name)
+ self.sub_slots = sub_slots
+ self.slot_type = slot_type
+ substructures.append(self)
+
+ def substructure_cname(self, scope):
+ return "%s%s_%s" % (Naming.pyrex_prefix, self.slot_name, scope.class_name)
+
+ def slot_code(self, scope):
+ return "&%s" % self.substructure_cname(scope)
+
+ def generate_substructure(self, scope, code):
+ code.putln("")
+ code.putln(
+ "static %s %s = {" % (
+ self.slot_type,
+ self.substructure_cname(scope)))
+ for slot in self.sub_slots:
+ slot.generate(scope, code)
+ code.putln("};")
+
+substructures = [] # List of all SuiteSlot instances
+
+class MethodTableSlot(SlotDescriptor):
+ # Slot descriptor for the method table.
+
+ def slot_code(self, scope):
+ return scope.method_table_cname
+
+
+class MemberTableSlot(SlotDescriptor):
+ # Slot descriptor for the table of Python-accessible attributes.
+
+ def slot_code(self, scope):
+ if scope.public_attr_entries:
+ return scope.member_table_cname
+ else:
+ return "0"
+
+
+class GetSetSlot(SlotDescriptor):
+ # Slot descriptor for the table of attribute get & set methods.
+
+ def slot_code(self, scope):
+ if scope.property_entries:
+ return scope.getset_table_cname
+ else:
+ return "0"
+
+
+class BaseClassSlot(SlotDescriptor):
+ # Slot descriptor for the base class slot.
+
+ def __init__(self, name):
+ SlotDescriptor.__init__(self, name, dynamic = 1)
+
+ def generate_dynamic_init_code(self, scope, code):
+ base_type = scope.parent_type.base_type
+ if base_type:
+ code.putln("%s.%s = %s;" % (
+ scope.parent_type.typeobj_cname,
+ self.slot_name,
+ base_type.typeptr_cname))
+
+
+# The following dictionary maps __xxx__ method names to slot descriptors.
+
+method_name_to_slot = {}
+
+## The following slots are (or could be) initialised with an
+## extern function pointer.
+#
+#slots_initialised_from_extern = (
+# "tp_free",
+#)
+
+#------------------------------------------------------------------------------------------
+#
+# Utility functions for accessing slot table data structures
+#
+#------------------------------------------------------------------------------------------
+
+def get_special_method_signature(name):
+ # Given a method name, if it is a special method,
+ # return its signature, else return None.
+ slot = method_name_to_slot.get(name)
+ if slot:
+ return slot.signature
+ else:
+ return None
+
+def get_property_accessor_signature(name):
+ # Return signature of accessor for an extension type
+ # property, else None.
+ return property_accessor_signatures.get(name)
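+
+# Illustrative usage (editor's sketch, not part of the original source): each
+# MethodSlot constructed further down registers its __xxx__ name in
+# method_name_to_slot, so once this module has finished importing:
+#
+#     get_special_method_signature("__add__")     # -> the binaryfunc Signature
+#     get_special_method_signature("spam")        # -> None (not a special method)
+#     get_property_accessor_signature("__get__")  # -> Signature("T", "O")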
+
+#------------------------------------------------------------------------------------------
+#
+# Signatures for generic Python functions and methods.
+#
+#------------------------------------------------------------------------------------------
+
+pyfunction_signature = Signature("-*", "O")
+pymethod_signature = Signature("T*", "O")
+
+#------------------------------------------------------------------------------------------
+#
+# Signatures for the various kinds of function that
+# can appear in the type object and its substructures.
+#
+#------------------------------------------------------------------------------------------
+
+unaryfunc = Signature("T", "O") # typedef PyObject * (*unaryfunc)(PyObject *);
+binaryfunc = Signature("OO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
+ibinaryfunc = Signature("TO", "O") # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
+ternaryfunc = Signature("OOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
+iternaryfunc = Signature("TOO", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
+callfunc = Signature("T*", "O") # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
+inquiry = Signature("T", "i") # typedef int (*inquiry)(PyObject *);
+lenfunc = Signature("T", "Z") # typedef Py_ssize_t (*lenfunc)(PyObject *);
+ # typedef int (*coercion)(PyObject **, PyObject **);
+intargfunc = Signature("Ti", "O") # typedef PyObject *(*intargfunc)(PyObject *, int);
+ssizeargfunc = Signature("TZ", "O") # typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
+intintargfunc = Signature("Tii", "O") # typedef PyObject *(*intintargfunc)(PyObject *, int, int);
+ssizessizeargfunc = Signature("TZZ", "O") # typedef PyObject *(*ssizessizeargfunc)(PyObject *, Py_ssize_t, Py_ssize_t);
+intobjargproc = Signature("TiO", 'r') # typedef int(*intobjargproc)(PyObject *, int, PyObject *);
+ssizeobjargproc = Signature("TZO", 'r') # typedef int(*ssizeobjargproc)(PyObject *, Py_ssize_t, PyObject *);
+intintobjargproc = Signature("TiiO", 'r') # typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *);
+ssizessizeobjargproc = Signature("TZZO", 'r') # typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
+intintargproc = Signature("Tii", 'r')
+ssizessizeargproc = Signature("TZZ", 'r')
+objargfunc = Signature("TO", "O")
+objobjargproc = Signature("TOO", 'r') # typedef int (*objobjargproc)(PyObject *, PyObject *, PyObject *);
+getreadbufferproc = Signature("TiP", 'i') # typedef int (*getreadbufferproc)(PyObject *, int, void **);
+getwritebufferproc = Signature("TiP", 'i') # typedef int (*getwritebufferproc)(PyObject *, int, void **);
+getsegcountproc = Signature("TI", 'i') # typedef int (*getsegcountproc)(PyObject *, int *);
+getcharbufferproc = Signature("TiS", 'i') # typedef int (*getcharbufferproc)(PyObject *, int, const char **);
+readbufferproc = Signature("TZP", "Z") # typedef Py_ssize_t (*readbufferproc)(PyObject *, Py_ssize_t, void **);
+writebufferproc = Signature("TZP", "Z") # typedef Py_ssize_t (*writebufferproc)(PyObject *, Py_ssize_t, void **);
+segcountproc = Signature("TZ", "Z") # typedef Py_ssize_t (*segcountproc)(PyObject *, Py_ssize_t *);
+writebufferproc = Signature("TZS", "Z") # typedef Py_ssize_t (*charbufferproc)(PyObject *, Py_ssize_t, char **);
+objargproc = Signature("TO", 'r') # typedef int (*objobjproc)(PyObject *, PyObject *);
+ # typedef int (*visitproc)(PyObject *, void *);
+ # typedef int (*traverseproc)(PyObject *, visitproc, void *);
+
+destructor = Signature("T", "v") # typedef void (*destructor)(PyObject *);
+# printfunc = Signature("TFi", 'r') # typedef int (*printfunc)(PyObject *, FILE *, int);
+ # typedef PyObject *(*getattrfunc)(PyObject *, char *);
+getattrofunc = Signature("TO", "O") # typedef PyObject *(*getattrofunc)(PyObject *, PyObject *);
+ # typedef int (*setattrfunc)(PyObject *, char *, PyObject *);
+setattrofunc = Signature("TOO", 'r') # typedef int (*setattrofunc)(PyObject *, PyObject *, PyObject *);
+delattrofunc = Signature("TO", 'r')
+cmpfunc = Signature("TO", "i") # typedef int (*cmpfunc)(PyObject *, PyObject *);
+reprfunc = Signature("T", "O") # typedef PyObject *(*reprfunc)(PyObject *);
+hashfunc = Signature("T", "l") # typedef long (*hashfunc)(PyObject *);
+ # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
+richcmpfunc = Signature("OOi", "O") # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
+getiterfunc = Signature("T", "O") # typedef PyObject *(*getiterfunc) (PyObject *);
+iternextfunc = Signature("T", "O") # typedef PyObject *(*iternextfunc) (PyObject *);
+descrgetfunc = Signature("TOO", "O") # typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *);
+descrsetfunc = Signature("TOO", 'r') # typedef int (*descrsetfunc) (PyObject *, PyObject *, PyObject *);
+descrdelfunc = Signature("TO", 'r')
+initproc = Signature("T*", 'r') # typedef int (*initproc)(PyObject *, PyObject *, PyObject *);
+ # typedef PyObject *(*newfunc)(struct _typeobject *, PyObject *, PyObject *);
+ # typedef PyObject *(*allocfunc)(struct _typeobject *, int);
+
+#------------------------------------------------------------------------------------------
+#
+# Signatures for accessor methods of properties.
+#
+#------------------------------------------------------------------------------------------
+
+property_accessor_signatures = {
+ '__get__': Signature("T", "O"),
+ '__set__': Signature("TO", 'r'),
+ '__del__': Signature("T", 'r')
+}
+
+#------------------------------------------------------------------------------------------
+#
+# Descriptor tables for the slots of the various type object
+# substructures, in the order they appear in the structure.
+#
+#------------------------------------------------------------------------------------------
+
+PyNumberMethods = (
+ MethodSlot(binaryfunc, "nb_add", "__add__"),
+ MethodSlot(binaryfunc, "nb_subtract", "__sub__"),
+ MethodSlot(binaryfunc, "nb_multiply", "__mul__"),
+ MethodSlot(binaryfunc, "nb_divide", "__div__"),
+ MethodSlot(binaryfunc, "nb_remainder", "__mod__"),
+ MethodSlot(binaryfunc, "nb_divmod", "__divmod__"),
+ MethodSlot(ternaryfunc, "nb_power", "__pow__"),
+ MethodSlot(unaryfunc, "nb_negative", "__neg__"),
+ MethodSlot(unaryfunc, "nb_positive", "__pos__"),
+ MethodSlot(unaryfunc, "nb_absolute", "__abs__"),
+ MethodSlot(inquiry, "nb_nonzero", "__nonzero__"),
+ MethodSlot(unaryfunc, "nb_invert", "__invert__"),
+ MethodSlot(binaryfunc, "nb_lshift", "__lshift__"),
+ MethodSlot(binaryfunc, "nb_rshift", "__rshift__"),
+ MethodSlot(binaryfunc, "nb_and", "__and__"),
+ MethodSlot(binaryfunc, "nb_xor", "__xor__"),
+ MethodSlot(binaryfunc, "nb_or", "__or__"),
+ EmptySlot("nb_coerce"),
+ MethodSlot(unaryfunc, "nb_int", "__int__"),
+ MethodSlot(unaryfunc, "nb_long", "__long__"),
+ MethodSlot(unaryfunc, "nb_float", "__float__"),
+ MethodSlot(unaryfunc, "nb_oct", "__oct__"),
+ MethodSlot(unaryfunc, "nb_hex", "__hex__"),
+
+ # Added in release 2.0
+ MethodSlot(ibinaryfunc, "nb_inplace_add", "__iadd__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_subtract", "__isub__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_multiply", "__imul__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_divide", "__idiv__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_remainder", "__imod__"),
+ MethodSlot(ternaryfunc, "nb_inplace_power", "__ipow__"), # NOT iternaryfunc!!!
+ MethodSlot(ibinaryfunc, "nb_inplace_lshift", "__ilshift__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_rshift", "__irshift__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_and", "__iand__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_xor", "__ixor__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_or", "__ior__"),
+
+ # Added in release 2.2
+ # The following require the Py_TPFLAGS_HAVE_CLASS flag
+ MethodSlot(binaryfunc, "nb_floor_divide", "__floordiv__"),
+ MethodSlot(binaryfunc, "nb_true_divide", "__truediv__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__"),
+ MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__"),
+ MethodSlot(unaryfunc, "nb_index", "__index__", flag = "Py_TPFLAGS_HAVE_INDEX")
+)
+
+PySequenceMethods = (
+ MethodSlot(lenfunc, "sq_length", "__len__"),
+ EmptySlot("sq_concat"), # nb_add used instead
+ EmptySlot("sq_repeat"), # nb_multiply used instead
+ SyntheticSlot("sq_item", ["__getitem__"], "0"), #EmptySlot("sq_item"), # mp_subscript used instead
+ MethodSlot(ssizessizeargfunc, "sq_slice", "__getslice__"),
+ EmptySlot("sq_ass_item"), # mp_ass_subscript used instead
+ SyntheticSlot("sq_ass_slice", ["__setslice__", "__delslice__"], "0"),
+ MethodSlot(cmpfunc, "sq_contains", "__contains__"),
+ EmptySlot("sq_inplace_concat"), # nb_inplace_add used instead
+ EmptySlot("sq_inplace_repeat"), # nb_inplace_multiply used instead
+)
+
+PyMappingMethods = (
+ MethodSlot(lenfunc, "mp_length", "__len__"),
+ MethodSlot(objargfunc, "mp_subscript", "__getitem__"),
+ SyntheticSlot("mp_ass_subscript", ["__setitem__", "__delitem__"], "0"),
+)
+
+PyBufferProcs = (
+ MethodSlot(getreadbufferproc, "bf_getreadbuffer", "__getreadbuffer__"),
+ MethodSlot(getwritebufferproc, "bf_getwritebuffer", "__getwritebuffer__"),
+ MethodSlot(getsegcountproc, "bf_getsegcount", "__getsegcount__"),
+ MethodSlot(getcharbufferproc, "bf_getcharbuffer", "__getcharbuffer__"),
+)
+
+#------------------------------------------------------------------------------------------
+#
+# The main slot table. This table contains descriptors for all the
+# top-level type slots, beginning with tp_dealloc, in the order they
+# appear in the type object.
+#
+#------------------------------------------------------------------------------------------
+
+slot_table = (
+ InternalMethodSlot("tp_dealloc"),
+ EmptySlot("tp_print"), #MethodSlot(printfunc, "tp_print", "__print__"),
+ EmptySlot("tp_getattr"),
+ EmptySlot("tp_setattr"),
+ MethodSlot(cmpfunc, "tp_compare", "__cmp__"),
+ MethodSlot(reprfunc, "tp_repr", "__repr__"),
+
+ SuiteSlot(PyNumberMethods, "PyNumberMethods", "tp_as_number"),
+ SuiteSlot(PySequenceMethods, "PySequenceMethods", "tp_as_sequence"),
+ SuiteSlot(PyMappingMethods, "PyMappingMethods", "tp_as_mapping"),
+
+ MethodSlot(hashfunc, "tp_hash", "__hash__"),
+ MethodSlot(callfunc, "tp_call", "__call__"),
+ MethodSlot(reprfunc, "tp_str", "__str__"),
+
+ SyntheticSlot("tp_getattro", ["__getattr__"], "0"), #"PyObject_GenericGetAttr"),
+ SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"), #"PyObject_GenericSetAttr"),
+
+ SuiteSlot(PyBufferProcs, "PyBufferProcs", "tp_as_buffer"),
+
+ TypeFlagsSlot("tp_flags"),
+ DocStringSlot("tp_doc"),
+
+ PyAttrDependentSlot("tp_traverse"),
+ PyAttrDependentSlot("tp_clear"),
+
+ # Later -- synthesize a method to split into separate ops?
+ MethodSlot(richcmpfunc, "tp_richcompare", "__richcmp__"),
+
+ EmptySlot("tp_weaklistoffset"),
+
+ MethodSlot(getiterfunc, "tp_iter", "__iter__"),
+ MethodSlot(iternextfunc, "tp_iternext", "__next__"),
+
+ MethodTableSlot("tp_methods"),
+ MemberTableSlot("tp_members"),
+ GetSetSlot("tp_getset"),
+
+ BaseClassSlot("tp_base"), #EmptySlot("tp_base"),
+ EmptySlot("tp_dict"),
+
+ SyntheticSlot("tp_descr_get", ["__get__"], "0"),
+ SyntheticSlot("tp_descr_set", ["__set__", "__delete__"], "0"),
+
+ EmptySlot("tp_dictoffset"),
+
+ MethodSlot(initproc, "tp_init", "__init__"),
+ EmptySlot("tp_alloc"), #FixedSlot("tp_alloc", "PyType_GenericAlloc"),
+ InternalMethodSlot("tp_new"),
+ # Some versions of Python 2.2 inherit the wrong value for tp_free when the
+ # type has GC but the base type doesn't, so we explicitly set it ourselves
+ # in that case.
+ GCDependentSlot("tp_free", "0", "_PyObject_GC_Del", dynamic = 1),
+
+ EmptySlot("tp_is_gc"),
+ EmptySlot("tp_bases"),
+ EmptySlot("tp_mro"),
+ EmptySlot("tp_cache"),
+ EmptySlot("tp_subclasses"),
+ EmptySlot("tp_weaklist"),
+)
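+
+# Editor's note (illustrative sketch, not part of the original source): the
+# code generator (see ModuleNode.py) is expected to drive these descriptors
+# roughly as follows, where 'scope' is the extension type's class scope and
+# 'code' the C code writer:
+#
+#     for suite in substructures:
+#         suite.generate_substructure(scope, code)      # PyNumberMethods etc.
+#     for slot in slot_table:
+#         slot.generate(scope, code)                    # the type object initialiser
+#     for slot in slot_table:
+#         slot.generate_dynamic_init_code(scope, code)  # run from the module init function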
+
+#------------------------------------------------------------------------------------------
+#
+# Descriptors for special methods which don't appear directly
+# in the type object or its substructures. These methods are
+# called from slot functions synthesized by Pyrex.
+#
+#------------------------------------------------------------------------------------------
+
+MethodSlot(initproc, "", "__cinit__")
+MethodSlot(destructor, "", "__dealloc__")
+MethodSlot(objobjargproc, "", "__setitem__")
+MethodSlot(objargproc, "", "__delitem__")
+MethodSlot(ssizessizeobjargproc, "", "__setslice__")
+MethodSlot(ssizessizeargproc, "", "__delslice__")
+MethodSlot(getattrofunc, "", "__getattr__")
+MethodSlot(setattrofunc, "", "__setattr__")
+MethodSlot(delattrofunc, "", "__delattr__")
+MethodSlot(descrgetfunc, "", "__get__")
+MethodSlot(descrsetfunc, "", "__set__")
+MethodSlot(descrdelfunc, "", "__delete__")
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Version.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Version.py
new file mode 100644
index 00000000..3f085698
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/Version.py
@@ -0,0 +1 @@
+version = '0.9.9'
diff --git a/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/__init__.py b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/debian/pyrex/pyrex-0.9.9/Pyrex/Compiler/__init__.py