--- /dev/null
+#
+# Pyrex - Command Line Parsing
+#
+
+import sys
+
# Help text written to stderr by bad_usage() when the command line
# cannot be parsed.
usage = """\
Usage: pyrexc [options] sourcefile...
Options:
 -v, --version Display version number of pyrex compiler
 -l, --create-listing Write error messages to a listing file
 -I, --include-dir <directory> Search for include files in named directory
 -o, --output-file <filename> Specify name of generated C file
The following experimental options are supported only on MacOSX:
 -C, --compile Compile generated .c file to .o file
 -X, --link Link .o file to produce extension module (implies -C)
 -+, --cplus Use C++ compiler for compiling and linking
 Additional .o files to link may be supplied when using -X."""
+
def bad_usage():
    # Report an incorrect invocation: show the usage summary on
    # stderr and terminate with a non-zero exit status.
    sys.stderr.write(usage + "\n")
    sys.exit(1)
+
def parse_command_line(args):
    # Parse pyrexc's command line.
    #
    # args   [string]   argument list; consumed destructively
    #
    # Returns (options, sources) where options is a CompilationOptions
    # and sources is the list of .pyx files to compile. Exits with
    # status 1 on a usage error.
    from Pyrex.Compiler.Main import \
        CompilationOptions, default_options

    def pop_arg():
        # Return the next argument, or report a usage error if there
        # are none left.
        if args:
            return args.pop(0)
        else:
            bad_usage()

    def get_param(option):
        # Return the parameter of a short option, taken either from
        # the option itself ("-Idir") or from the next argument
        # ("-I dir").
        tail = option[2:]
        if tail:
            return tail
        else:
            return pop_arg()

    options = CompilationOptions(default_options)
    sources = []
    while args:
        if args[0].startswith("-"):
            option = pop_arg()
            if option in ("-v", "--version"):
                options.show_version = 1
            elif option in ("-l", "--create-listing"):
                options.use_listing_file = 1
            elif option in ("-C", "--compile"):
                options.c_only = 0
            elif option in ("-X", "--link"):
                # Linking implies compiling (-C).
                options.c_only = 0
                options.obj_only = 0
            elif option in ("-+", "--cplus"):
                options.cplus = 1
            elif option.startswith("-I"):
                options.include_path.append(get_param(option))
            elif option == "--include-dir":
                options.include_path.append(pop_arg())
            elif option in ("-o", "--output-file"):
                options.output_file = pop_arg()
            else:
                bad_usage()
        else:
            arg = pop_arg()
            if arg.endswith(".pyx"):
                sources.append(arg)
            elif arg.endswith(".o"):
                options.objects.append(arg)
            else:
                # Unknown suffixes are reported but do not abort the run.
                print >>sys.stderr, \
                    "pyrexc: %s: Unknown filename suffix" % arg
    if options.objects and len(sources) > 1:
        print >>sys.stderr, \
            "pyrexc: Only one source file allowed together with .o files"
        # Bug fix: this usage error previously fell through without
        # exiting, so compilation continued with an invalid setup.
        sys.exit(1)
    if options.use_listing_file and len(sources) > 1:
        # Bug fix: this check is for -l/--create-listing, but the
        # message previously referred to -o.
        print >>sys.stderr, \
            "pyrexc: Only one source file allowed when using -l"
        sys.exit(1)
    return options, sources
+
--- /dev/null
+#
+# Pyrex - Code output module
+#
+
+import Naming
+from Pyrex.Utils import open_new_file
+from PyrexTypes import py_object_type, typecast
+
class CCodeWriter:
    # Low-level emitter for the generated C file. Tracks indentation,
    # pending source-position markers, and the labels used for return,
    # error and loop control flow.
    #
    # f               file           output file
    # level           int            indentation level
    # bol             bool           beginning of line?
    # marker          string         comment to emit before next line
    # return_label    string         function return point label
    # error_label     string         error catch point label
    # continue_label  string         loop continue point label
    # break_label     string         loop break point label
    # label_counter   integer        counter for naming labels
    # in_try_finally  boolean        inside try of try...finally
    # filename_table  {string : int} for finding filename table indexes
    # filename_list   [string]       filenames in filename table order

    in_try_finally = 0

    def __init__(self, outfile_name):
        self.f = open_new_file(outfile_name)
        self.level = 0
        self.bol = 1
        self.marker = None
        self.label_counter = 1
        self.error_label = None
        self.filename_table = {}
        self.filename_list = []

    def putln(self, code = ""):
        # Write a complete line at the current indentation level,
        # emitting any pending position marker first.
        if self.marker and self.bol:
            self.emit_marker()
        if code:
            self.put(code)
        self.f.write("\n");
        self.bol = 1

    def emit_marker(self):
        # Emit the pending source position marker as a C comment.
        self.f.write("\n");
        self.indent()
        self.f.write("/* %s */\n" % self.marker)
        self.marker = None

    def put(self, code):
        # Write a code fragment, adjusting the indentation level by
        # the braces it contains: closing braces take effect before
        # the fragment is written, opening braces after.
        dl = code.count("{") - code.count("}")
        if dl < 0:
            self.level += dl
        if self.bol:
            self.indent()
        self.f.write(code)
        self.bol = 0
        if dl > 0:
            self.level += dl

    def increase_indent(self):
        self.level = self.level + 1

    def decrease_indent(self):
        self.level = self.level - 1

    def begin_block(self):
        # Open a C block and indent its contents.
        self.putln("{")
        self.increase_indent()

    def end_block(self):
        # Close a block opened with begin_block().
        self.decrease_indent()
        self.putln("}")

    def indent(self):
        # Write the leading whitespace for the current level.
        self.f.write(" " * self.level)

    def mark_pos(self, pos):
        # Record a source position (file, line, col); it is emitted
        # as a comment before the next line of code.
        file, line, col = pos
        self.marker = '"%s":%s' % (file, line)

    def init_labels(self):
        # Reset label state at the start of a function body.
        self.label_counter = 0
        self.return_label = self.new_label()
        self.new_error_label()
        self.continue_label = None
        self.break_label = None

    def new_label(self):
        # Return a fresh, unique C label name.
        n = self.label_counter
        self.label_counter = n + 1
        return "%s%d" % (Naming.label_prefix, n)

    def new_error_label(self):
        # Install a fresh error label, returning the one it replaces
        # so the caller can restore it later.
        old_err_lbl = self.error_label
        self.error_label = self.new_label()
        return old_err_lbl

    def get_loop_labels(self):
        return (
            self.continue_label,
            self.break_label)

    def set_loop_labels(self, labels):
        (self.continue_label,
            self.break_label) = labels

    def new_loop_labels(self):
        # Install fresh continue/break labels, returning the old pair.
        old_labels = self.get_loop_labels()
        self.set_loop_labels(
            (self.new_label(),
                self.new_label()))
        return old_labels

    def get_all_labels(self):
        return (
            self.continue_label,
            self.break_label,
            self.return_label,
            self.error_label)

    def set_all_labels(self, labels):
        (self.continue_label,
            self.break_label,
            self.return_label,
            self.error_label) = labels

    def all_new_labels(self):
        # Replace every active label with a fresh one (inactive None
        # labels stay None), returning the old labels.
        old_labels = self.get_all_labels()
        new_labels = []
        for old_label in old_labels:
            if old_label:
                new_labels.append(self.new_label())
            else:
                new_labels.append(old_label)
        self.set_all_labels(new_labels)
        return old_labels

    def put_label(self, lbl):
        # The trailing ';' keeps the label legal even at the end of
        # a C block.
        self.putln("%s:;" % lbl)

    def put_var_declarations(self, entries, static = 0, dll_linkage = None,
            definition = True):
        for entry in entries:
            if not entry.in_cinclude:
                self.put_var_declaration(entry, static, dll_linkage, definition)

    def put_var_declaration(self, entry, static = 0, dll_linkage = None,
            definition = True):
        # Emit a C declaration (or definition) for a variable entry,
        # choosing the storage class from its visibility.
        #print "Code.put_var_declaration:", entry.name, "definition =", definition
        visibility = entry.visibility
        if visibility == 'private' and not definition:
            return
        if visibility == 'extern':
            storage_class = Naming.extern_c_macro
        elif visibility == 'public':
            if definition:
                storage_class = ""
            else:
                storage_class = Naming.extern_c_macro
        elif visibility == 'private':
            if static:
                storage_class = "static"
            else:
                storage_class = ""
        if storage_class:
            self.put("%s " % storage_class)
        #if visibility == 'extern' or visibility == 'public' and not definition:
        # self.put("%s " % Naming.extern_c_macro)
        #elif static and visibility <> 'public':
        # self.put("static ")
        if visibility <> 'public':
            dll_linkage = None
        self.put(entry.type.declaration_code(entry.cname,
            dll_linkage = dll_linkage))
        if entry.init is not None:
            self.put(" = %s" % entry.type.literal_code(entry.init))
        self.putln(";")

    def entry_as_pyobject(self, entry):
        # Return the entry's cname, cast to PyObject * when its static
        # C type is not directly usable as one.
        type = entry.type
        if (not entry.is_self_arg and not entry.type.is_complete()) \
            or (entry.type.is_extension_type and entry.type.base_type):
            return "(PyObject *)" + entry.cname
        else:
            return entry.cname

    def as_pyobject(self, cname, type):
        # Return cname cast to PyObject * if its type requires it.
        return typecast(py_object_type, type, cname)
        #if type.is_extension_type and type.base_type:
        # return "(PyObject *)" + cname
        #else:
        # return cname

    def put_incref(self, cname, type):
        self.putln("Py_INCREF(%s);" % self.as_pyobject(cname, type))

    def put_decref(self, cname, type):
        self.putln("Py_DECREF(%s);" % self.as_pyobject(cname, type))

    def put_var_incref(self, entry):
        if entry.type.is_pyobject:
            self.putln("Py_INCREF(%s);" % self.entry_as_pyobject(entry))

    def put_decref_clear(self, cname, type):
        # Decref and null out the variable in a single statement.
        self.putln("Py_DECREF(%s); %s = 0;" % (
            typecast(py_object_type, type, cname), cname))
            #self.as_pyobject(cname, type), cname))

    def put_xdecref(self, cname, type):
        self.putln("Py_XDECREF(%s);" % self.as_pyobject(cname, type))

    def put_xdecref_clear(self, cname, type):
        self.putln("Py_XDECREF(%s); %s = 0;" % (
            self.as_pyobject(cname, type), cname))

    def put_var_decref(self, entry):
        if entry.type.is_pyobject:
            self.putln("Py_DECREF(%s);" % self.entry_as_pyobject(entry))

    def put_var_decref_clear(self, entry):
        if entry.type.is_pyobject:
            self.putln("Py_DECREF(%s); %s = 0;" % (
                self.entry_as_pyobject(entry), entry.cname))

    def put_var_xdecref(self, entry):
        if entry.type.is_pyobject:
            self.putln("Py_XDECREF(%s);" % self.entry_as_pyobject(entry))

    def put_var_xdecref_clear(self, entry):
        if entry.type.is_pyobject:
            self.putln("Py_XDECREF(%s); %s = 0;" % (
                self.entry_as_pyobject(entry), entry.cname))

    def put_var_decrefs(self, entries):
        # Entries flagged xdecref_cleanup may legitimately be NULL,
        # so they get Py_XDECREF instead of Py_DECREF.
        for entry in entries:
            if entry.xdecref_cleanup:
                self.put_var_xdecref(entry)
            else:
                self.put_var_decref(entry)

    def put_var_xdecrefs(self, entries):
        for entry in entries:
            self.put_var_xdecref(entry)

    def put_var_xdecrefs_clear(self, entries):
        for entry in entries:
            self.put_var_xdecref_clear(entry)

    def put_init_to_py_none(self, cname, type):
        # Assign Py_None (suitably cast) and take a new reference.
        py_none = typecast(type, py_object_type, "Py_None")
        self.putln("%s = %s; Py_INCREF(Py_None);" % (cname, py_none))

    def put_init_var_to_py_none(self, entry, template = "%s"):
        # template lets the caller wrap the cname, e.g. "p->%s".
        code = template % entry.cname
        #if entry.type.is_extension_type:
        # code = "((PyObject*)%s)" % code
        self.put_init_to_py_none(code, entry.type)

    def put_pymethoddef(self, entry, term):
        # Emit one PyMethodDef table entry, terminated by 'term'.
        # doc_code is 0 (C NULL) when the function has no docstring.
        if entry.doc:
            doc_code = entry.doc_cname
        else:
            doc_code = 0
        self.putln(
            '{"%s", (PyCFunction)%s, METH_VARARGS|METH_KEYWORDS, %s}%s' % (
                entry.name,
                entry.func_cname,
                doc_code,
                term))

    def error_goto(self, pos):
        # Return a C statement recording the source position and
        # jumping to the current error label.
        return "{%s = %s[%s]; %s = %s; goto %s;}" % (
            Naming.filename_cname,
            Naming.filetable_cname,
            self.lookup_filename(pos[0]),
            Naming.lineno_cname,
            pos[1],
            self.error_label)

    def lookup_filename(self, filename):
        # Return the index of filename in the filename table,
        # adding it if not already present.
        try:
            index = self.filename_table[filename]
        except KeyError:
            index = len(self.filename_list)
            self.filename_list.append(filename)
            self.filename_table[filename] = index
        return index
+
+
class PyrexCodeWriter:
    # Writes generated Pyrex source, indenting each line by the
    # current nesting level.
    #
    # f      file   output file
    # level  int    indentation level

    def __init__(self, outfile_name):
        self.level = 0
        self.f = open_new_file(outfile_name)

    def putln(self, code):
        # Write one line, prefixed by the current indentation.
        self.f.write(" " * self.level + code + "\n")

    def indent(self):
        self.level += 1

    def dedent(self):
        self.level -= 1
+
--- /dev/null
# Flags controlling debug trace output from the compiler;
# all disabled by default.
debug_disposal_code = debug_temp_alloc = debug_coercion = 0
+
--- /dev/null
+#
+# Pyrex - Errors
+#
+
+import sys
+from Pyrex.Utils import open_new_file
+
+
class PyrexError(Exception):
    # Root of the exception hierarchy for errors reported by the
    # Pyrex compiler.
    pass
+
+
class CompileError(PyrexError):
    # An error in the source being compiled.
    #
    # position  (filename, line, column) tuple, or None
    # message   description of the problem

    def __init__(self, position = None, message = ""):
        self.position = position
        self.message = message
        if position:
            text = "%s:%d:%d: " % position + message
        else:
            text = message
        Exception.__init__(self, text)
+
+
class InternalError(Exception):
    # Raised for conditions that indicate a bug in the compiler
    # itself, never for errors in the user's source.

    def __init__(self, message):
        text = "Internal compiler error: %s" % message
        Exception.__init__(self, text)
+
+
# Module state for the current error listing.
listing_file = None  # open file receiving error messages, or None
num_errors = 0       # number of errors reported since the listing began
echo_file = None     # file errors are echoed to (normally stderr), or None
+
def open_listing_file(path, echo_to_stderr = 1):
    # Begin a new error listing. If path is None, no file
    # is opened, the error counter is just reset.
    global listing_file, num_errors, echo_file
    listing_file = None
    if path is not None:
        listing_file = open_new_file(path)
    echo_file = None
    if echo_to_stderr:
        echo_file = sys.stderr
    num_errors = 0
+
def close_listing_file():
    # Close and forget the current listing file, if one is open.
    global listing_file
    if not listing_file:
        return
    listing_file.close()
    listing_file = None
+
def error(position, message):
    # Report a compile error: write it to the listing and echo files
    # (when open), bump the error counter, and return the
    # CompileError object so the caller may raise it.
    #print "Errors.error:", repr(position), repr(message) ###
    global num_errors
    err = CompileError(position, message)
    line = "%s\n" % err
    for f in (listing_file, echo_file):
        if f:
            f.write(line)
    num_errors += 1
    return err
--- /dev/null
+#
+# Pyrex - Parse tree nodes for expressions
+#
+
+from string import join
+
+from Errors import error, InternalError
+import Naming
+from Nodes import Node
+import PyrexTypes
+from PyrexTypes import py_object_type, typecast
+import Symtab
+import Options
+
+from Pyrex.Debugging import print_call_chain
+from DebugFlags import debug_disposal_code, debug_temp_alloc, \
+ debug_coercion
+
class ExprNode(Node):
    # Abstract base class for all expression nodes in the parse tree.
    #
    # subexprs     [string]   Class var holding names of subexpr node attrs
    # type         PyrexType  Type of the result
    # result_code  string     Code fragment
    # result_ctype string     C type of result_code if different from type
    # is_temp      boolean    Result is in a temporary variable
    # is_sequence_constructor
    #              boolean    Is a list or tuple constructor expression
    # saved_subexpr_nodes
    #              [ExprNode or [ExprNode or None] or None]
    #              Cached result of subexpr_nodes()

    result_ctype = None

    #  The Analyse Expressions phase for expressions is split
    #  into two sub-phases:
    #
    #    Analyse Types
    #      Determines the result type of the expression based
    #      on the types of its sub-expressions, and inserts
    #      coercion nodes into the expression tree where needed.
    #      Marks nodes which will need to have temporary variables
    #      allocated.
    #
    #    Allocate Temps
    #      Allocates temporary variables where needed, and fills
    #      in the result_code field of each node.
    #
    #  ExprNode provides some convenience routines which
    #  perform both of the above phases. These should only
    #  be called from statement nodes, and only when no
    #  coercion nodes need to be added around the expression
    #  being analysed. In that case, the above two phases
    #  should be invoked separately.
    #
    #  Framework code in ExprNode provides much of the common
    #  processing for the various phases. It makes use of the
    #  'subexprs' class attribute of ExprNodes, which should
    #  contain a list of the names of attributes which can
    #  hold sub-nodes or sequences of sub-nodes.
    #
    #  The framework makes use of a number of abstract methods.
    #  Their responsibilities are as follows.
    #
    #    Declaration Analysis phase
    #
    #      analyse_target_declaration
    #        Called during the Analyse Declarations phase to analyse
    #        the LHS of an assignment or argument of a del statement.
    #        Nodes which cannot be the LHS of an assignment need not
    #        implement it.
    #
    #    Expression Analysis phase
    #
    #      analyse_types
    #        - Call analyse_types on all sub-expressions.
    #        - Check operand types, and wrap coercion nodes around
    #          sub-expressions where needed.
    #        - Set the type of this node.
    #        - If a temporary variable will be required for the
    #          result, set the is_temp flag of this node.
    #
    #      analyse_target_types
    #        Called during the Analyse Types phase to analyse
    #        the LHS of an assignment or argument of a del
    #        statement. Similar responsibilities to analyse_types.
    #
    #      allocate_temps
    #        - Call allocate_temps for all sub-nodes.
    #        - Call allocate_temp for this node.
    #        - If a temporary was allocated, call release_temp on
    #          all sub-expressions.
    #
    #        A default implementation of allocate_temps is
    #        provided which uses the following abstract method:
    #
    #          calculate_result_code
    #            - Return a C code fragment evaluating to
    #              the result. This is only called when the
    #              result is not a temporary.
    #
    #      check_const
    #        - Check that this node and its subnodes form a
    #          legal constant expression. If so, do nothing,
    #          otherwise call not_const.
    #
    #        The default implementation of check_const
    #        assumes that the expression is not constant.
    #
    #      check_const_addr
    #        - Same as check_const, except check that the
    #          expression is a C lvalue whose address is
    #          constant. Otherwise, call addr_not_const.
    #
    #        The default implementation of calc_const_addr
    #        assumes that the expression is not a constant
    #        lvalue.
    #
    #    Code Generation phase
    #
    #      generate_evaluation_code
    #        - Call generate_evaluation_code for sub-expressions.
    #        - Perform the functions of generate_result_code
    #          (see below).
    #        - If result is temporary, call generate_disposal_code
    #          on all sub-expressions.
    #
    #        A default implementation of generate_evaluation_code
    #        is provided which uses the following abstract method:
    #
    #          generate_result_code
    #            - Generate any C statements necessary to calculate
    #              the result of this node from the results of its
    #              sub-expressions.
    #
    #      generate_assignment_code
    #        Called on the LHS of an assignment.
    #        - Call generate_evaluation_code for sub-expressions.
    #        - Generate code to perform the assignment.
    #        - If the assignment absorbed a reference, call
    #          generate_post_assignment_code on the RHS,
    #          otherwise call generate_disposal_code on it.
    #
    #      generate_deletion_code
    #        Called on an argument of a del statement.
    #        - Call generate_evaluation_code for sub-expressions.
    #        - Generate code to perform the deletion.
    #        - Call generate_disposal_code on all sub-expressions.
    #
    #  #result_as_extension_type ### OBSOLETE ###
    #  #  Normally, the results of all nodes whose type
    #  #  is a Python object, either generic or an extension
    #  #  type, are returned as a generic Python object, so
    #  #  that they can be passed directly to Python/C API
    #  #  routines. This method is called to obtain the
    #  #  result as the actual type of the node. It is only
    #  #  called when the type is known to actually be an
    #  #  extension type, and nodes whose result can never
    #  #  be an extension type need not implement it.
    #

    is_sequence_constructor = 0
    is_attribute = 0

    saved_subexpr_nodes = None
    is_temp = 0

    def not_implemented(self, method_name):
        # Raise an InternalError identifying the missing abstract
        # method implementation.
        print_call_chain(method_name, "not implemented") ###
        raise InternalError(
            "%s.%s not implemented" %
                (self.__class__.__name__, method_name))

    def is_lvalue(self):
        return 0

    def is_ephemeral(self):
        # An ephemeral node is one whose result is in
        # a Python temporary and we suspect there are no
        # other references to it. Certain operations are
        # disallowed on such values, since they are
        # likely to result in a dangling pointer.
        return self.type.is_pyobject and self.is_temp

    def subexpr_nodes(self):
        # Extract a list of subexpression nodes based
        # on the contents of the subexprs class attribute.
        if self.saved_subexpr_nodes is None:
            nodes = []
            for name in self.subexprs:
                item = getattr(self, name)
                if item:
                    if isinstance(item, ExprNode):
                        nodes.append(item)
                    else:
                        nodes.extend(item)
            self.saved_subexpr_nodes = nodes
        return self.saved_subexpr_nodes

    def result_as(self, type = None):
        # Return the result code cast to the specified C type.
        return typecast(type, self.ctype(), self.result_code)

    def py_result(self):
        # Return the result code cast to PyObject *.
        return self.result_as(py_object_type)

    def ctype(self):
        # Return the native C type of the result (i.e. the
        # C type of the result_code expression).
        return self.result_ctype or self.type

    # ------------- Declaration Analysis ----------------

    def analyse_target_declaration(self, env):
        # Default: this node cannot be an assignment target.
        error(self.pos, "Cannot assign to or delete this")

    # ------------- Expression Analysis ----------------

    def analyse_const_expression(self, env):
        # Called during the analyse_declarations phase of a
        # constant expression. Analyses the expression's type,
        # checks whether it is a legal const expression,
        # and determines its value.
        self.analyse_types(env)
        self.allocate_temps(env)
        self.check_const()

    def analyse_expressions(self, env):
        # Convenience routine performing both the Type
        # Analysis and Temp Allocation phases for a whole
        # expression.
        self.analyse_types(env)
        self.allocate_temps(env)

    def analyse_target_expression(self, env):
        # Convenience routine performing both the Type
        # Analysis and Temp Allocation phases for the LHS of
        # an assignment.
        self.analyse_target_types(env)
        self.allocate_target_temps(env)

    def analyse_boolean_expression(self, env):
        # Analyse expression and coerce to a boolean.
        self.analyse_types(env)
        bool = self.coerce_to_boolean(env)
        bool.allocate_temps(env)
        return bool

    def analyse_temp_boolean_expression(self, env):
        # Analyse boolean expression and coerce result into
        # a temporary. This is used when a branch is to be
        # performed on the result and we won't have an
        # opportunity to ensure disposal code is executed
        # afterwards. By forcing the result into a temporary,
        # we ensure that all disposal has been done by the
        # time we get the result.
        self.analyse_types(env)
        bool = self.coerce_to_boolean(env)
        temp_bool = bool.coerce_to_temp(env)
        temp_bool.allocate_temps(env)
        return temp_bool

    # --------------- Type Analysis ------------------

    def analyse_as_module(self, env):
        # If this node can be interpreted as a reference to a
        # cimported module, return its scope, else None.
        return None

    def analyse_as_extension_type(self, env):
        # If this node can be interpreted as a reference to an
        # extension type, return its type, else None.
        return None

    def analyse_types(self, env):
        self.not_implemented("analyse_types")

    def analyse_target_types(self, env):
        # Default: same analysis as for an rvalue.
        self.analyse_types(env)

    def check_const(self):
        # Default: expressions are not constant.
        self.not_const()

    def not_const(self):
        error(self.pos, "Not allowed in a constant expression")

    def check_const_addr(self):
        # Default: expressions are not constant lvalues.
        self.addr_not_const()

    def addr_not_const(self):
        error(self.pos, "Address is not constant")

    # ----------------- Result Allocation -----------------

    def result_in_temp(self):
        # Return true if result is in a temporary owned by
        # this node or one of its subexpressions. Overridden
        # by certain nodes which can share the result of
        # a subnode.
        return self.is_temp

    def allocate_target_temps(self, env):
        # Perform allocate_temps for the LHS of an assignment.
        if debug_temp_alloc:
            print self, "Allocating target temps"
        self.allocate_subexpr_temps(env)
        self.result_code = self.target_code()

    def allocate_temps(self, env, result = None):
        # Allocate temporary variables for this node and
        # all its sub-expressions. If a result is specified,
        # this must be a temp node and the specified variable
        # is used as the result instead of allocating a new
        # one.
        if debug_temp_alloc:
            print self, "Allocating temps"
        self.allocate_subexpr_temps(env)
        self.allocate_temp(env, result)
        if self.is_temp:
            # Sub-expression results can be released once our own
            # temporary holds the result.
            self.release_subexpr_temps(env)

    def allocate_subexpr_temps(self, env):
        # Allocate temporary variables for all sub-expressions
        # of this node.
        if debug_temp_alloc:
            print self, "Allocating temps for:", self.subexprs
        for node in self.subexpr_nodes():
            if node:
                if debug_temp_alloc:
                    print self, "Allocating temps for", node
                node.allocate_temps(env)

    def allocate_temp(self, env, result = None):
        # If this node requires a temporary variable for its
        # result, allocate one, otherwise set the result to
        # a C code fragment. If a result is specified,
        # this must be a temp node and the specified variable
        # is used as the result instead of allocating a new
        # one.
        if debug_temp_alloc:
            print self, "Allocating temp"
        if result:
            if not self.is_temp:
                raise InternalError("Result forced on non-temp node")
            self.result_code = result
        elif self.is_temp:
            type = self.type
            if not type.is_void:
                if type.is_pyobject:
                    # All Python objects share one generic temp type.
                    type = PyrexTypes.py_object_type
                self.result_code = env.allocate_temp(type)
            else:
                self.result_code = None
            if debug_temp_alloc:
                print self, "Allocated result", self.result_code
        else:
            self.result_code = self.calculate_result_code()

    def target_code(self):
        # Return code fragment for use as LHS of a C assignment.
        return self.calculate_result_code()

    def calculate_result_code(self):
        self.not_implemented("calculate_result_code")

    def release_target_temp(self, env):
        # Release temporaries used by LHS of an assignment.
        self.release_subexpr_temps(env)

    def release_temp(self, env):
        # If this node owns a temporary result, release it,
        # otherwise release results of its sub-expressions.
        if self.is_temp:
            if debug_temp_alloc:
                print self, "Releasing result", self.result_code
            env.release_temp(self.result_code)
        else:
            self.release_subexpr_temps(env)

    def release_subexpr_temps(self, env):
        # Release the results of all sub-expressions of
        # this node.
        for node in self.subexpr_nodes():
            if node:
                node.release_temp(env)

    # ---------------- Code Generation -----------------

    def make_owned_reference(self, code):
        # If result is a pyobject, make sure we own
        # a reference to it.
        if self.type.is_pyobject and not self.result_in_temp():
            code.put_incref(self.result_code, self.ctype())

    def generate_evaluation_code(self, code):
        # Generate code to evaluate this node and
        # its sub-expressions, and dispose of any
        # temporary results of its sub-expressions.
        self.generate_subexpr_evaluation_code(code)
        self.generate_result_code(code)
        if self.is_temp:
            self.generate_subexpr_disposal_code(code)

    def generate_subexpr_evaluation_code(self, code):
        for node in self.subexpr_nodes():
            node.generate_evaluation_code(code)

    def generate_result_code(self, code):
        self.not_implemented("generate_result_code")

    def generate_disposal_code(self, code):
        # If necessary, generate code to dispose of
        # temporary Python reference.
        if self.is_temp:
            if self.type.is_pyobject:
                code.put_decref_clear(self.result_code, self.ctype())
        else:
            self.generate_subexpr_disposal_code(code)

    def generate_subexpr_disposal_code(self, code):
        # Generate code to dispose of temporary results
        # of all sub-expressions.
        for node in self.subexpr_nodes():
            node.generate_disposal_code(code)

    def generate_post_assignment_code(self, code):
        # Same as generate_disposal_code except that
        # assignment will have absorbed a reference to
        # the result if it is a Python object.
        if self.is_temp:
            if self.type.is_pyobject:
                code.putln("%s = 0;" % self.result_code)
        else:
            self.generate_subexpr_disposal_code(code)

    def generate_assignment_code(self, rhs, code):
        # Stub method for nodes which are not legal as
        # the LHS of an assignment. An error will have
        # been reported earlier.
        pass

    def generate_deletion_code(self, code):
        # Stub method for nodes that are not legal as
        # the argument of a del statement. An error
        # will have been reported earlier.
        pass

    # ----------------- Coercion ----------------------

    def coerce_to(self, dst_type, env):
        # Coerce the result so that it can be assigned to
        # something of type dst_type. If processing is necessary,
        # wraps this node in a coercion node and returns that.
        # Otherwise, returns this node unchanged.
        #
        # This method is called during the analyse_expressions
        # phase of the src_node's processing.
        src = self
        src_type = self.type
        # NOTE(review): the two flags below are computed but unused.
        src_is_py_type = src_type.is_pyobject
        dst_is_py_type = dst_type.is_pyobject

        if dst_type.is_pyobject:
            if not src.type.is_pyobject:
                src = CoerceToPyTypeNode(src, env)
            if not src.type.subtype_of(dst_type):
                src = PyTypeTestNode(src, dst_type, env)
        elif src.type.is_pyobject:
            src = CoerceFromPyTypeNode(dst_type, src, env)
        else: # neither src nor dst are py types
            if not dst_type.assignable_from(src_type):
                error(self.pos, "Cannot assign type '%s' to '%s'" %
                    (src.type, dst_type))
        return src

    def coerce_to_pyobject(self, env):
        return self.coerce_to(PyrexTypes.py_object_type, env)

    def coerce_to_boolean(self, env):
        # Coerce result to something acceptable as
        # a boolean value.
        type = self.type
        if type.is_pyobject or type.is_ptr or type.is_float:
            return CoerceToBooleanNode(self, env)
        else:
            if not type.is_int:
                error(self.pos,
                    "Type '%s' not acceptable as a boolean" % type)
            return self

    def coerce_to_integer(self, env):
        # If not already some C integer type, coerce to longint.
        if self.type.is_int:
            return self
        else:
            return self.coerce_to(PyrexTypes.c_long_type, env)

    def coerce_to_temp(self, env):
        # Ensure that the result is in a temporary.
        if self.result_in_temp():
            return self
        else:
            return CoerceToTempNode(self, env)

    def coerce_to_simple(self, env):
        # Ensure that the result is simple (see is_simple).
        if self.is_simple():
            return self
        else:
            return self.coerce_to_temp(env)

    def is_simple(self):
        # A node is simple if its result is something that can
        # be referred to without performing any operations, e.g.
        # a constant, local var, C global var, struct member
        # reference, or temporary.
        return self.result_in_temp()
+
+
class AtomicExprNode(ExprNode):
    # Abstract base class for expression nodes which have
    # no sub-expressions.

    subexprs = []  # no subexpr attributes, so subexpr_nodes() is empty
+
+
class PyConstNode(AtomicExprNode):
    # Abstract base class for constant Python objects whose
    # C name is a fixed identifier held in the 'value' attribute.

    def analyse_types(self, env):
        self.type = py_object_type

    def is_simple(self):
        return 1

    def calculate_result_code(self):
        return self.value

    def generate_result_code(self, code):
        pass
+
+
class NoneNode(PyConstNode):
    # The constant value None

    value = "Py_None"  # C name of the None singleton
+
+
class EllipsisNode(PyConstNode):
    # '...' in a subscript list.

    value = "Py_Ellipsis"  # C name of the Ellipsis singleton
+
+
class ConstNode(AtomicExprNode):
    # Abstract base type for literal constant nodes.
    #
    # value   string   C code fragment

    is_literal = 1

    def analyse_types(self, env):
        # Nothing to do: the result type is a class attribute.
        pass

    def is_simple(self):
        return 1

    def check_const(self):
        # A literal is always a legal constant expression.
        pass

    def generate_result_code(self, code):
        pass

    def calculate_result_code(self):
        return str(self.value)
+
+
class NullNode(ConstNode):
    # The C null pointer constant.
    type = PyrexTypes.c_null_ptr_type
    value = "0"
+
+
class CharNode(ConstNode):
    # C character literal.
    type = PyrexTypes.c_char_type

    def calculate_result_code(self):
        # Emit as a C character constant.
        return "'%s'" % self.value
+
+
class IntNode(ConstNode):
    # C integer literal.
    type = PyrexTypes.c_long_type
+
+
class FloatNode(ConstNode):
    # C floating point literal.
    type = PyrexTypes.c_double_type
+
+
class StringNode(ConstNode):
    # String literal. Starts out as a C string; coercion to a
    # Python type switches to a pre-allocated Python string.
    #
    # entry   Symtab.Entry

    type = PyrexTypes.c_char_ptr_type

    def analyse_types(self, env):
        self.entry = env.add_string_const(self.value)

    def coerce_to(self, dst_type, env):
        # Arrange for a Python version of the string to be pre-allocated
        # when coercing to a Python type.
        node = self
        if dst_type.is_pyobject and not self.type.is_pyobject:
            node = self.as_py_string_node(env)
        # We still need to perform normal coerce_to processing on the
        # result, because we might be coercing to an extension type,
        # in which case a type test node will be needed.
        return ConstNode.coerce_to(node, dst_type, env)

    def as_py_string_node(self, env):
        # Return a new StringNode with the same entry as this node
        # but whose type is a Python type instead of a C type.
        entry = self.entry
        env.add_py_string(entry)
        return StringNode(self.pos, entry = entry, type = py_object_type)

    def calculate_result_code(self):
        if not self.type.is_pyobject:
            return self.entry.cname
        return self.entry.pystring_cname
+
+
class LongNode(AtomicExprNode):
    # Python long integer literal.
    #
    # value   string

    def analyse_types(self, env):
        self.is_temp = 1
        self.type = py_object_type

    def generate_evaluation_code(self, code):
        result = self.result_code
        code.putln(
            '%s = PyLong_FromString("%s", 0, 0); if (!%s) %s' % (
                result,
                self.value,
                result,
                code.error_goto(self.pos)))
+
+
class ImagNode(AtomicExprNode):
    # Imaginary number literal.
    #
    # value   float   imaginary part

    def analyse_types(self, env):
        self.is_temp = 1
        self.type = py_object_type

    def generate_evaluation_code(self, code):
        result = self.result_code
        code.putln(
            "%s = PyComplex_FromDoubles(0.0, %s); if (!%s) %s" % (
                result,
                self.value,
                result,
                code.error_goto(self.pos)))
+
+
class NameNode(AtomicExprNode):
    # Reference to a local or global variable name.
    #
    # name   string  Python name of the variable
    # entry  Entry   Symbol table entry

    is_name = 1

    def analyse_as_module(self, env):
        # Try to interpret this as a reference to a cimported module.
        # Returns the module scope, or None.
        entry = env.lookup(self.name)
        if entry and entry.as_module:
            return entry.as_module
        return None

    def analyse_as_extension_type(self, env):
        # Try to interpret this as a reference to an extension type.
        # Returns the extension type, or None.
        entry = env.lookup(self.name)
        if entry and entry.is_type and entry.type.is_extension_type:
            return entry.type
        return None

    def analyse_target_declaration(self, env):
        # Ensure the name has an entry in the current scope, declaring
        # it as a generic Python object variable if it is unknown.
        self.entry = env.lookup_here(self.name)
        if not self.entry:
            #print "NameNode.analyse_target_declaration:", self.name ###
            #print "...declaring as py_object_type" ###
            self.entry = env.declare_var(self.name, py_object_type, self.pos)

    def analyse_types(self, env):
        # Names not found in any enclosing scope are assumed to be
        # Python builtins.
        self.entry = env.lookup(self.name)
        if not self.entry:
            self.entry = env.declare_builtin(self.name, self.pos)
        self.analyse_entry(env)

    def analyse_entry(self, env):
        # Complete type analysis once the symbol table entry is known.
        self.check_identifier_kind()
        self.type = self.entry.type
        if self.entry.is_declared_generic:
            self.result_ctype = py_object_type
        # Reference to C array turns into pointer to first element.
        while self.type.is_array:
            self.type = self.type.element_ptr_type()
        if self.entry.is_pyglobal or self.entry.is_builtin:
            assert self.type.is_pyobject, "Python global or builtin not a Python object"
            # Globals and builtins are fetched at run time, so the
            # result needs a temporary.
            self.is_temp = 1
            if Options.intern_names:
                env.use_utility_code(get_name_interned_utility_code)
            else:
                env.use_utility_code(get_name_utility_code)

    def analyse_target_types(self, env):
        # Analyse this name as the target of an assignment.
        self.check_identifier_kind()
        if self.is_lvalue():
            self.type = self.entry.type
        else:
            error(self.pos, "Assignment to non-lvalue '%s'"
                % self.name)
            self.type = PyrexTypes.error_type

    def check_identifier_kind(self):
        # Reject entries that cannot appear in an expression, falling
        # back to an associated variable entry where one exists.
        entry = self.entry
        if not (entry.is_const or entry.is_variable
                or entry.is_builtin or entry.is_cfunction):
            if self.entry.as_variable:
                self.entry = self.entry.as_variable
            else:
                error(self.pos,
                    "'%s' is not a constant, variable or function identifier" % self.name)

    def is_simple(self):
        # If it's not a C variable, it'll be in a temp.
        return 1

    def calculate_target_results(self, env):
        pass

    def check_const(self):
        entry = self.entry
        if not (entry.is_const or entry.is_cfunction):
            self.not_const()

    def check_const_addr(self):
        entry = self.entry
        if not (entry.is_cglobal or entry.is_cfunction):
            self.addr_not_const()

    def is_lvalue(self):
        # Only writable variables (not arrays, not readonly) can be
        # assigned to.
        return self.entry.is_variable and \
            not self.entry.type.is_array and \
            not self.entry.is_readonly

    def is_ephemeral(self):
        # Name nodes are never ephemeral, even if the
        # result is in a temporary.
        return 0

    def calculate_result_code(self):
        if self.entry is None:
            return "<error>" # There was an error earlier
        return self.entry.cname

    def generate_result_code(self, code):
        # Python globals and builtins need a run-time name lookup;
        # plain C variables need no code (their cname is the result).
        if not hasattr(self, 'entry'):
            error(self.pos, "INTERNAL ERROR: NameNode has no entry attribute during code generation")
        entry = self.entry
        if entry is None:
            return # There was an error earlier
        if entry.is_pyglobal or entry.is_builtin:
            if entry.is_builtin:
                namespace = Naming.builtins_cname
            else: # entry.is_pyglobal
                namespace = entry.namespace_cname
            if Options.intern_names:
                #assert entry.interned_cname is not None
                code.putln(
                    '%s = __Pyx_GetName(%s, %s); if (!%s) %s' % (
                        self.result_code,
                        namespace,
                        entry.interned_cname,
                        self.result_code,
                        code.error_goto(self.pos)))
            else:
                code.putln(
                    '%s = __Pyx_GetName(%s, "%s"); if (!%s) %s' % (
                        self.result_code,
                        namespace,
                        self.entry.name,
                        self.result_code,
                        code.error_goto(self.pos)))

    def generate_assignment_code(self, rhs, code):
        # Python globals are assigned via setattr on their namespace
        # object; local/C variables by plain C assignment, with
        # reference-count maintenance for Python objects.
        entry = self.entry
        if entry is None:
            return # There was an error earlier
        if entry.is_pyglobal:
            namespace = self.entry.namespace_cname
            if Options.intern_names:
                code.putln(
                    'if (PyObject_SetAttr(%s, %s, %s) < 0) %s' % (
                        namespace,
                        entry.interned_cname,
                        rhs.py_result(),
                        code.error_goto(self.pos)))
            else:
                code.putln(
                    'if (PyObject_SetAttrString(%s, "%s", %s) < 0) %s' % (
                        namespace,
                        entry.name,
                        rhs.py_result(),
                        code.error_goto(self.pos)))
            if debug_disposal_code:
                print "NameNode.generate_assignment_code:"
                print "...generating disposal code for", rhs
            rhs.generate_disposal_code(code)
        else:
            if self.type.is_pyobject:
                #print "NameNode.generate_assignment_code: to", self.name ###
                #print "...from", rhs ###
                #print "...LHS type", self.type, "ctype", self.ctype() ###
                #print "...RHS type", rhs.type, "ctype", rhs.ctype() ###
                # The LHS takes ownership of the new value, so the old
                # value must be released first.
                rhs.make_owned_reference(code)
                code.put_decref(self.result_code, self.ctype())
            code.putln('%s = %s;' % (self.result_code, rhs.result_as(self.ctype())))
            if debug_disposal_code:
                print "NameNode.generate_assignment_code:"
                print "...generating post-assignment code for", rhs
            rhs.generate_post_assignment_code(code)

    def generate_deletion_code(self, code):
        # Only deletion of module-level Python names is supported.
        if self.entry is None:
            return # There was an error earlier
        if not self.entry.is_pyglobal:
            error(self.pos, "Deletion of local or C global name not supported")
            return
        code.putln(
            'if (PyObject_DelAttrString(%s, "%s") < 0) %s' % (
                Naming.module_cname,
                self.entry.name,
                code.error_goto(self.pos)))
+
+
class BackquoteNode(ExprNode):
    # `expr` — Python repr() of an expression.
    #
    # arg ExprNode

    subexprs = ['arg']

    def analyse_types(self, env):
        # The operand must be a Python object; the result is a new
        # Python object held in a temporary.
        self.arg.analyse_types(env)
        self.arg = self.arg.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        result = self.result_code
        code.putln(
            "%s = PyObject_Repr(%s); if (!%s) %s" % (
                result, self.arg.py_result(), result,
                code.error_goto(self.pos)))
+
+
class ImportNode(ExprNode):
    # Used as part of import statement implementation.
    # Implements result =
    #    __import__(module_name, globals(), None, name_list)
    #
    # module_name  StringNode        dotted name of module
    # name_list    ListNode or None  list of names to be imported

    subexprs = ['module_name', 'name_list']

    def analyse_types(self, env):
        # The module name is passed to __Pyx_Import as a Python string;
        # the name list (if any) stays a Python list.
        self.module_name.analyse_types(env)
        self.module_name = self.module_name.coerce_to_pyobject(env)
        if self.name_list:
            self.name_list.analyse_types(env)
        self.type = py_object_type
        self.is_temp = 1
        env.use_utility_code(import_utility_code)

    def generate_result_code(self, code):
        # A NULL name list requests a plain module import.
        name_list_code = "0"
        if self.name_list:
            name_list_code = self.name_list.py_result()
        result = self.result_code
        code.putln(
            "%s = __Pyx_Import(%s, %s); if (!%s) %s" % (
                result,
                self.module_name.py_result(),
                name_list_code,
                result,
                code.error_goto(self.pos)))
+
+
class IteratorNode(ExprNode):
    # Used as part of for statement implementation.
    # Implements result = iter(sequence)
    #
    # sequence ExprNode

    subexprs = ['sequence']

    def analyse_types(self, env):
        # Any Python object may be iterated; the iterator is a new
        # Python object held in a temporary.
        self.sequence.analyse_types(env)
        self.sequence = self.sequence.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        result = self.result_code
        code.putln(
            "%s = PyObject_GetIter(%s); if (!%s) %s" % (
                result,
                self.sequence.py_result(),
                result,
                code.error_goto(self.pos)))
+
+
class NextNode(AtomicExprNode):
    # Used as part of for statement implementation.
    # Implements result = iterator.next()
    # Created during analyse_types phase.
    # The iterator is not owned by this node.
    #
    # iterator ExprNode

    def __init__(self, iterator, env):
        # Bypasses ExprNode.__init__: position is borrowed from the
        # iterator node and the result is always a Python temp.
        self.pos = iterator.pos
        self.iterator = iterator
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        # Emits: fetch the next item; on NULL either propagate a
        # pending exception or break out of the enclosing generated
        # loop (normal end of iteration).
        code.putln(
            "%s = PyIter_Next(%s);" % (
                self.result_code,
                self.iterator.py_result()))
        code.putln(
            "if (!%s) {" %
                self.result_code)
        code.putln(
                "if (PyErr_Occurred()) %s" %
                    code.error_goto(self.pos))
        code.putln(
                "break;")
        code.putln(
            "}")
+
+
class ExcValueNode(AtomicExprNode):
    # Node created during analyse_types phase
    # of an ExceptClauseNode to fetch the current
    # exception value.

    def __init__(self, pos, env):
        ExprNode.__init__(self, pos)
        # The fetched exception value is always a Python temp.
        self.type = py_object_type
        self.is_temp = 1
        env.use_utility_code(get_exception_utility_code)

    def generate_result_code(self, code):
        result = self.result_code
        code.putln(
            "%s = __Pyx_GetExcValue(); if (!%s) %s" % (
                result, result, code.error_goto(self.pos)))
+
+
class TempNode(AtomicExprNode):
    # Node created during analyse_types phase
    # of some nodes to hold a temporary value.

    def __init__(self, pos, type, env):
        ExprNode.__init__(self, pos)
        self.type = type
        if type.is_pyobject:
            # Python-typed temps are stored in a generic object slot.
            self.result_ctype = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        # The temp is filled in by whoever created this node; nothing
        # needs to be evaluated here.
        pass
+
+
class PyTempNode(TempNode):
    # TempNode holding a Python value.

    def __init__(self, pos, env):
        # Convenience constructor: a TempNode fixed to py_object_type.
        TempNode.__init__(self, pos, PyrexTypes.py_object_type, env)
+
+
+#-------------------------------------------------------------------
+#
+# Trailer nodes
+#
+#-------------------------------------------------------------------
+
class IndexNode(ExprNode):
    # Sequence indexing.
    #
    # base ExprNode
    # index ExprNode

    subexprs = ['base', 'index']

    def is_ephemeral(self):
        # The result's lifetime is tied to the base object's.
        return self.base.is_ephemeral()

    def analyse_target_declaration(self, env):
        pass

    def analyse_types(self, env):
        # Python object indexing yields a Python object in a temp;
        # C pointer/array indexing yields the element type, and the
        # index must be (coercible to) a C integer.
        self.base.analyse_types(env)
        self.index.analyse_types(env)
        if self.base.type.is_pyobject:
            self.index = self.index.coerce_to_pyobject(env)
            self.type = py_object_type
            self.is_temp = 1
        else:
            if self.base.type.is_ptr or self.base.type.is_array:
                self.type = self.base.type.base_type
            else:
                error(self.pos,
                    "Attempting to index non-array type '%s'" %
                        self.base.type)
                self.type = PyrexTypes.error_type
            if self.index.type.is_pyobject:
                self.index = self.index.coerce_to(
                    PyrexTypes.c_int_type, env)
            if not self.index.type.is_int:
                error(self.pos,
                    "Invalid index type '%s'" %
                        self.index.type)

    def check_const_addr(self):
        self.base.check_const_addr()
        self.index.check_const()

    def is_lvalue(self):
        return 1

    def calculate_result_code(self):
        # C array/pointer element access expression.
        return "(%s[%s])" % (
            self.base.result_code, self.index.result_code)

    def generate_result_code(self, code):
        # Only the Python case needs code; the C case is a pure
        # expression produced by calculate_result_code.
        if self.type.is_pyobject:
            code.putln(
                "%s = PyObject_GetItem(%s, %s); if (!%s) %s" % (
                    self.result_code,
                    self.base.py_result(),
                    self.index.py_result(),
                    self.result_code,
                    code.error_goto(self.pos)))

    def generate_assignment_code(self, rhs, code):
        self.generate_subexpr_evaluation_code(code)
        if self.type.is_pyobject:
            code.putln(
                "if (PyObject_SetItem(%s, %s, %s) < 0) %s" % (
                    self.base.py_result(),
                    self.index.py_result(),
                    rhs.py_result(),
                    code.error_goto(self.pos)))
            self.generate_subexpr_disposal_code(code)
        else:
            code.putln(
                "%s = %s;" % (
                    self.result_code, rhs.result_code))
        rhs.generate_disposal_code(code)

    def generate_deletion_code(self, code):
        self.generate_subexpr_evaluation_code(code)
        code.putln(
            "if (PyObject_DelItem(%s, %s) < 0) %s" % (
                self.base.py_result(),
                self.index.py_result(),
                code.error_goto(self.pos)))
        self.generate_subexpr_disposal_code(code)
+
+
class SliceIndexNode(ExprNode):
    # 2-element slice indexing
    #
    # base ExprNode
    # start ExprNode or None
    # stop ExprNode or None

    subexprs = ['base', 'start', 'stop']

    def analyse_target_declaration(self, env):
        pass

    def analyse_types(self, env):
        # The base is always treated as a Python sequence; the bounds
        # are C ints passed straight to the PySequence slice API.
        self.base.analyse_types(env)
        if self.start:
            self.start.analyse_types(env)
        if self.stop:
            self.stop.analyse_types(env)
        self.base = self.base.coerce_to_pyobject(env)
        c_int = PyrexTypes.c_int_type
        if self.start:
            self.start = self.start.coerce_to(c_int, env)
        if self.stop:
            self.stop = self.stop.coerce_to(c_int, env)
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        code.putln(
            "%s = PySequence_GetSlice(%s, %s, %s); if (!%s) %s" % (
                self.result_code,
                self.base.py_result(),
                self.start_code(),
                self.stop_code(),
                self.result_code,
                code.error_goto(self.pos)))

    def generate_assignment_code(self, rhs, code):
        # NOTE(review): passes rhs.result_code (not rhs.py_result())
        # to PySequence_SetSlice, which expects a PyObject* —
        # presumably the rhs has already been coerced to a Python
        # object by the assignment statement; confirm against callers.
        self.generate_subexpr_evaluation_code(code)
        code.putln(
            "if (PySequence_SetSlice(%s, %s, %s, %s) < 0) %s" % (
                self.base.py_result(),
                self.start_code(),
                self.stop_code(),
                rhs.result_code,
                code.error_goto(self.pos)))
        self.generate_subexpr_disposal_code(code)
        rhs.generate_disposal_code(code)

    def generate_deletion_code(self, code):
        self.generate_subexpr_evaluation_code(code)
        code.putln(
            "if (PySequence_DelSlice(%s, %s, %s) < 0) %s" % (
                self.base.py_result(),
                self.start_code(),
                self.stop_code(),
                code.error_goto(self.pos)))
        self.generate_subexpr_disposal_code(code)

    def start_code(self):
        # Omitted lower bound defaults to 0.
        if self.start:
            return self.start.result_code
        else:
            return "0"

    def stop_code(self):
        # Omitted upper bound defaults to INT_MAX, mimicking an
        # open-ended Python slice.
        if self.stop:
            return self.stop.result_code
        else:
            return "0x7fffffff"

    def calculate_result_code(self):
        # self.result_code is not used, but this method must exist
        return "<unused>"
+
+
class SliceNode(ExprNode):
    # start:stop:step in a subscript list.
    #
    # start ExprNode
    # stop  ExprNode
    # step  ExprNode

    subexprs = ['start', 'stop', 'step']

    def analyse_types(self, env):
        # All three components are handed to PySlice_New as Python
        # objects; the slice itself is a new Python temp.
        for component in (self.start, self.stop, self.step):
            component.analyse_types(env)
        self.start = self.start.coerce_to_pyobject(env)
        self.stop = self.stop.coerce_to_pyobject(env)
        self.step = self.step.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        result = self.result_code
        code.putln(
            "%s = PySlice_New(%s, %s, %s); if (!%s) %s" % (
                result,
                self.start.py_result(),
                self.stop.py_result(),
                self.step.py_result(),
                result,
                code.error_goto(self.pos)))
+
+
class SimpleCallNode(ExprNode):
    # Function call without keyword, * or ** args.
    #
    # function      ExprNode
    # args          [ExprNode]
    # arg_tuple     ExprNode or None   used internally
    # self          ExprNode or None   used internally
    # coerced_self  ExprNode or None   used internally

    subexprs = ['self', 'coerced_self', 'function', 'args', 'arg_tuple']

    # NOTE: 'self' here is a node attribute holding the object a bound
    # C method was looked up on — not the usual Python 'self'.
    self = None
    coerced_self = None
    arg_tuple = None

    def analyse_types(self, env):
        # Python calls pack the arguments into a tuple for
        # PyObject_CallObject; C calls analyse and coerce each
        # argument individually.
        function = self.function
        function.is_called = 1
        self.function.analyse_types(env)
        if function.is_attribute and function.entry and function.entry.is_cmethod:
            # Take ownership of the object from which the attribute
            # was obtained, because we need to pass it as 'self'.
            self.self = function.obj
            function.obj = CloneNode(self.self)
        func_type = self.function_type()
        if func_type.is_pyobject:
            self.arg_tuple = TupleNode(self.pos, args = self.args)
            self.args = None
            self.arg_tuple.analyse_types(env)
            self.type = py_object_type
            self.is_temp = 1
        else:
            for arg in self.args:
                arg.analyse_types(env)
            if self.self and func_type.args:
                # Coerce 'self' to the type expected by the method.
                expected_type = func_type.args[0].type
                self.coerced_self = CloneNode(self.self).coerce_to(
                    expected_type, env)
                # Insert coerced 'self' argument into argument list.
                self.args.insert(0, self.coerced_self)
            self.analyse_c_function_call(env)

    def function_type(self):
        # Return the type of the function being called, coercing a function
        # pointer to a function if necessary.
        func_type = self.function.type
        if func_type.is_ptr:
            func_type = func_type.base_type
        return func_type

    def analyse_c_function_call(self, env):
        # Validate the callee and argument count, coerce the fixed
        # arguments to their formal types, and decide whether the call
        # needs a temporary and/or an exception check.
        func_type = self.function_type()
        # Check function type
        if not func_type.is_cfunction:
            if not func_type.is_error:
                error(self.pos, "Calling non-function type '%s'" %
                    func_type)
            self.type = PyrexTypes.error_type
            self.result_code = "<error>"
            return
        # Check no. of args
        expected_nargs = len(func_type.args)
        actual_nargs = len(self.args)
        if actual_nargs < expected_nargs \
            or (not func_type.has_varargs and actual_nargs > expected_nargs):
            expected_str = str(expected_nargs)
            if func_type.has_varargs:
                expected_str = "at least " + expected_str
            error(self.pos,
                "Call with wrong number of arguments (expected %s, got %s)"
                    % (expected_str, actual_nargs))
            self.args = None
            self.type = PyrexTypes.error_type
            self.result_code = "<error>"
            return
        # Coerce arguments
        for i in range(expected_nargs):
            formal_type = func_type.args[i].type
            self.args[i] = self.args[i].coerce_to(formal_type, env)
        for i in range(expected_nargs, actual_nargs):
            # Extra varargs have no formal type to coerce to, so
            # Python objects cannot appear there.
            if self.args[i].type.is_pyobject:
                error(self.args[i].pos,
                    "Python object cannot be passed as a varargs parameter")
        # Calc result type and code fragment
        self.type = func_type.return_type
        if self.type.is_pyobject \
            or func_type.exception_value is not None \
            or func_type.exception_check:
            self.is_temp = 1
            if self.type.is_pyobject:
                self.result_ctype = py_object_type

    def calculate_result_code(self):
        return self.c_call_code()

    def c_call_code(self):
        # Build the C call expression, casting each fixed argument to
        # its formal type; extra varargs are passed through unchanged.
        func_type = self.function_type()
        if self.args is None or not func_type.is_cfunction:
            return "<error>"
        formal_args = func_type.args
        arg_list_code = []
        for (formal_arg, actual_arg) in \
                zip(formal_args, self.args):
            arg_code = actual_arg.result_as(formal_arg.type)
            arg_list_code.append(arg_code)
        for actual_arg in self.args[len(formal_args):]:
            arg_list_code.append(actual_arg.result_code)
        result = "%s(%s)" % (self.function.result_code,
            join(arg_list_code, ","))
        return result

    def generate_result_code(self, code):
        # Python calls go through PyObject_CallObject; C calls emit the
        # call expression, assigning to a temp and/or testing the error
        # indication(s) declared for the function.
        func_type = self.function_type()
        if func_type.is_pyobject:
            code.putln(
                "%s = PyObject_CallObject(%s, %s); if (!%s) %s" % (
                    self.result_code,
                    self.function.py_result(),
                    self.arg_tuple.py_result(),
                    self.result_code,
                    code.error_goto(self.pos)))
        elif func_type.is_cfunction:
            exc_checks = []
            if self.type.is_pyobject:
                exc_checks.append("!%s" % self.result_code)
            else:
                exc_val = func_type.exception_value
                exc_check = func_type.exception_check
                if exc_val is not None:
                    exc_checks.append("%s == %s" % (self.result_code, exc_val))
                if exc_check:
                    exc_checks.append("PyErr_Occurred()")
            if self.is_temp or exc_checks:
                rhs = self.c_call_code()
                if self.result_code:
                    lhs = "%s = " % self.result_code
                    if self.is_temp and self.type.is_pyobject:
                        #return_type = self.type # func_type.return_type
                        #print "SimpleCallNode.generate_result_code: casting", rhs, \
                        #	"from", return_type, "to pyobject" ###
                        rhs = typecast(py_object_type, self.type, rhs)
                else:
                    lhs = ""
                code.putln(
                    "%s%s; if (%s) %s" % (
                        lhs,
                        rhs,
                        " && ".join(exc_checks),
                        code.error_goto(self.pos)))
+
+
class GeneralCallNode(ExprNode):
    # General Python function call, including keyword,
    # * and ** arguments.
    #
    # function        ExprNode
    # positional_args ExprNode         Tuple of positional arguments
    # keyword_args    ExprNode or None Dict of keyword arguments
    # starstar_arg    ExprNode or None Dict of extra keyword args

    subexprs = ['function', 'positional_args', 'keyword_args', 'starstar_arg']

    def analyse_types(self, env):
        # Everything involved is a Python object at the call site.
        self.function.analyse_types(env)
        self.positional_args.analyse_types(env)
        if self.keyword_args:
            self.keyword_args.analyse_types(env)
        if self.starstar_arg:
            self.starstar_arg.analyse_types(env)
        self.function = self.function.coerce_to_pyobject(env)
        self.positional_args = \
            self.positional_args.coerce_to_pyobject(env)
        if self.starstar_arg:
            self.starstar_arg = \
                self.starstar_arg.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        # When both explicit keywords and a ** dict are present, the
        # ** entries are merged into the keyword dict in place
        # (PyDict_Update) before making the call.
        if self.keyword_args and self.starstar_arg:
            code.putln(
                "if (PyDict_Update(%s, %s) < 0) %s" % (
                    self.keyword_args.py_result(),
                    self.starstar_arg.py_result(),
                    code.error_goto(self.pos)))
            keyword_code = self.keyword_args.py_result()
        elif self.keyword_args:
            keyword_code = self.keyword_args.py_result()
        elif self.starstar_arg:
            keyword_code = self.starstar_arg.py_result()
        else:
            keyword_code = None
        # With no keywords a plain PyObject_CallObject suffices.
        if not keyword_code:
            call_code = "PyObject_CallObject(%s, %s)" % (
                self.function.py_result(),
                self.positional_args.py_result())
        else:
            call_code = "PyEval_CallObjectWithKeywords(%s, %s, %s)" % (
                self.function.py_result(),
                self.positional_args.py_result(),
                keyword_code)
        code.putln(
            "%s = %s; if (!%s) %s" % (
                self.result_code,
                call_code,
                self.result_code,
                code.error_goto(self.pos)))
+
+
class AsTupleNode(ExprNode):
    # Convert argument to tuple. Used for normalising
    # the * argument of a function call.
    #
    # arg ExprNode

    subexprs = ['arg']

    def analyse_types(self, env):
        # The operand is any Python object; the result is a new tuple
        # held in a temporary.
        self.arg.analyse_types(env)
        self.arg = self.arg.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        result = self.result_code
        code.putln(
            "%s = PySequence_Tuple(%s); if (!%s) %s" % (
                result, self.arg.py_result(), result,
                code.error_goto(self.pos)))
+
+
class AttributeNode(ExprNode):
    # obj.attribute
    #
    # obj        ExprNode
    # attribute  string
    #
    # Used internally:
    #
    # is_py_attr           boolean  Is a Python getattr operation
    # member               string   C name of struct member
    # is_called            boolean  Function call is being done on result
    # entry                Entry    Symbol table entry of attribute
    # interned_attr_cname  string   C name of interned attribute name

    is_attribute = 1
    subexprs = ['obj']

    type = PyrexTypes.error_type
    result = "<error>"
    entry = None
    is_called = 0

    def analyse_target_declaration(self, env):
        pass

    def analyse_target_types(self, env):
        self.analyse_types(env, target = 1)

    def analyse_types(self, env, target = 0):
        # An attribute reference is tried, in order, as: a cimported
        # C name, an unbound C method of an extension type, and
        # finally an ordinary (C struct or Python) attribute.
        if self.analyse_as_cimported_attribute(env, target):
            return
        if not target and self.analyse_as_unbound_cmethod(env):
            return
        self.analyse_as_ordinary_attribute(env, target)

    def analyse_as_cimported_attribute(self, env, target):
        # Try to interpret this as a reference to an imported
        # C const, type, var or function. If successful, mutates
        # this node into a NameNode and returns 1, otherwise
        # returns 0.
        module_scope = self.obj.analyse_as_module(env)
        if module_scope:
            entry = module_scope.lookup_here(self.attribute)
            if entry and (
                entry.is_cglobal or entry.is_cfunction
                or entry.is_type or entry.is_const):
                    self.mutate_into_name_node(env, entry, target)
                    return 1
        return 0

    def analyse_as_unbound_cmethod(self, env):
        # Try to interpret this as a reference to an unbound
        # C method of an extension type. If successful, mutates
        # this node into a NameNode and returns 1, otherwise
        # returns 0.
        type = self.obj.analyse_as_extension_type(env)
        if type:
            entry = type.scope.lookup_here(self.attribute)
            if entry and entry.is_cmethod:
                # Create a temporary entry describing the C method
                # as an ordinary function.
                ubcm_entry = Symtab.Entry(entry.name,
                    "%s->%s" % (type.vtabptr_cname, entry.cname),
                    entry.type)
                ubcm_entry.is_cfunction = 1
                ubcm_entry.func_cname = entry.func_cname
                self.mutate_into_name_node(env, ubcm_entry, None)
                return 1
        return 0

    def analyse_as_extension_type(self, env):
        # Try to interpret this as a reference to an extension type
        # in a cimported module. Returns the extension type, or None.
        module_scope = self.obj.analyse_as_module(env)
        if module_scope:
            entry = module_scope.lookup_here(self.attribute)
            if entry and entry.is_type and entry.type.is_extension_type:
                return entry.type
        return None

    def analyse_as_module(self, env):
        # Try to interpret this as a reference to a cimported module
        # in another cimported module. Returns the module scope, or None.
        module_scope = self.obj.analyse_as_module(env)
        if module_scope:
            entry = module_scope.lookup_here(self.attribute)
            if entry and entry.as_module:
                return entry.as_module
        return None

    def mutate_into_name_node(self, env, entry, target):
        # Mutate this node into a NameNode and complete the
        # analyse_types phase.
        self.__class__ = NameNode
        self.name = self.attribute
        self.entry = entry
        del self.obj
        del self.attribute
        if target:
            NameNode.analyse_target_types(self, env)
        else:
            NameNode.analyse_entry(self, env)

    def analyse_as_ordinary_attribute(self, env, target):
        # Analyse as a C struct/union/extension-type member or a
        # run-time Python attribute access.
        self.obj.analyse_types(env)
        self.analyse_attribute(env)
        if self.entry and self.entry.is_cmethod and not self.is_called:
            error(self.pos, "C method can only be called")
        # Reference to C array turns into pointer to first element.
        while self.type.is_array:
            self.type = self.type.element_ptr_type()
        if self.is_py_attr:
            if not target:
                self.is_temp = 1
                self.result_ctype = py_object_type

    def analyse_attribute(self, env):
        # Look up attribute and set self.type and self.member.
        self.is_py_attr = 0
        self.member = self.attribute
        if self.obj.type.is_string:
            # A C string has no attributes; treat it as a Python string.
            self.obj = self.obj.coerce_to_pyobject(env)
        obj_type = self.obj.type
        if obj_type.is_ptr:
            obj_type = obj_type.base_type
            self.op = "->"
        elif obj_type.is_extension_type:
            self.op = "->"
        else:
            self.op = "."
        if obj_type.has_attributes:
            entry = None
            if obj_type.attributes_known():
                entry = obj_type.scope.lookup_here(self.attribute)
            else:
                error(self.pos,
                    "Cannot select attribute of incomplete type '%s'"
                        % obj_type)
                obj_type = PyrexTypes.error_type
            self.entry = entry
            if entry:
                if entry.is_variable or entry.is_cmethod:
                    self.type = entry.type
                    self.member = entry.cname
                    return
                else:
                    # If it's not a variable or C method, it must be a Python
                    # method of an extension type, so we treat it like a Python
                    # attribute.
                    pass
        # If we get here, the base object is not a struct/union/extension
        # type, or it is an extension type and the attribute is either not
        # declared or is declared as a Python method. Treat it as a Python
        # attribute reference.
        if obj_type.is_pyobject:
            self.type = py_object_type
            self.is_py_attr = 1
            if Options.intern_names:
                self.interned_attr_cname = env.intern(self.attribute)
        else:
            if not obj_type.is_error:
                error(self.pos,
                    "Object of type '%s' has no attribute '%s'" %
                        (obj_type, self.attribute))

    # The NameNode fallbacks below appear unreachable in the normal
    # flow: mutate_into_name_node changes __class__ to NameNode, so a
    # mutated node dispatches to NameNode's methods directly. They are
    # kept for safety.

    def is_simple(self):
        if self.obj:
            return self.result_in_temp() or self.obj.is_simple()
        else:
            return NameNode.is_simple(self)

    def is_lvalue(self):
        if self.obj:
            return 1
        else:
            return NameNode.is_lvalue(self)

    def is_ephemeral(self):
        if self.obj:
            return self.obj.is_ephemeral()
        else:
            return NameNode.is_ephemeral(self)

    def calculate_result_code(self):
        #print "AttributeNode.calculate_result_code:", self.member ###
        #print "...obj node =", self.obj, "code", self.obj.result_code ###
        #print "...obj type", self.obj.type, "ctype", self.obj.ctype() ###
        obj = self.obj
        obj_code = obj.result_as(obj.type)
        #print "...obj_code =", obj_code ###
        if self.entry and self.entry.is_cmethod:
            # C method: index into the type's vtable.
            return "((struct %s *)%s%s%s)->%s" % (
                obj.type.vtabstruct_cname, obj_code, self.op,
                obj.type.vtabslot_cname, self.member)
        else:
            return "%s%s%s" % (obj_code, self.op, self.member)

    def generate_result_code(self, code):
        # Python attribute access emits a getattr call; C attribute
        # access is a pure expression and needs no code here.
        if self.is_py_attr:
            if Options.intern_names:
                code.putln(
                    '%s = PyObject_GetAttr(%s, %s); if (!%s) %s' % (
                        self.result_code,
                        self.obj.py_result(),
                        self.interned_attr_cname,
                        self.result_code,
                        code.error_goto(self.pos)))
            else:
                code.putln(
                    '%s = PyObject_GetAttrString(%s, "%s"); if (!%s) %s' % (
                        self.result_code,
                        # BUG FIX: was "self.objpy_result()", which raised
                        # AttributeError during code generation; matches the
                        # interned branch and the setattr/delattr methods.
                        self.obj.py_result(),
                        self.attribute,
                        self.result_code,
                        code.error_goto(self.pos)))

    def generate_assignment_code(self, rhs, code):
        # Python attributes are set via setattr; C members by plain
        # assignment with refcount maintenance for Python objects.
        self.obj.generate_evaluation_code(code)
        if self.is_py_attr:
            if Options.intern_names:
                code.putln(
                    'if (PyObject_SetAttr(%s, %s, %s) < 0) %s' % (
                        self.obj.py_result(),
                        self.interned_attr_cname,
                        rhs.py_result(),
                        code.error_goto(self.pos)))
            else:
                code.putln(
                    'if (PyObject_SetAttrString(%s, "%s", %s) < 0) %s' % (
                        self.obj.py_result(),
                        self.attribute,
                        rhs.py_result(),
                        code.error_goto(self.pos)))
            rhs.generate_disposal_code(code)
        else:
            #select_code = self.select_code()
            select_code = self.result_code
            if self.type.is_pyobject:
                # The member slot takes ownership of the new value, so
                # the old value must be released first.
                rhs.make_owned_reference(code)
                code.put_decref(select_code, self.ctype())
            code.putln(
                "%s = %s;" % (
                    select_code,
                    rhs.result_code))
            rhs.generate_post_assignment_code(code)
        self.obj.generate_disposal_code(code)

    def generate_deletion_code(self, code):
        # Only Python attributes can be deleted.
        self.obj.generate_evaluation_code(code)
        if self.is_py_attr:
            if Options.intern_names:
                code.putln(
                    'if (PyObject_DelAttr(%s, %s) < 0) %s' % (
                        self.obj.py_result(),
                        self.interned_attr_cname,
                        code.error_goto(self.pos)))
            else:
                code.putln(
                    'if (PyObject_DelAttrString(%s, "%s") < 0) %s' % (
                        self.obj.py_result(),
                        self.attribute,
                        code.error_goto(self.pos)))
        else:
            error(self.pos, "Cannot delete C attribute of extension type")
        self.obj.generate_disposal_code(code)
+
+#-------------------------------------------------------------------
+#
+# Constructor nodes
+#
+#-------------------------------------------------------------------
+
class SequenceNode(ExprNode):
    # Base class for list and tuple constructor nodes.
    # Contains common code for performing sequence unpacking.
    #
    # args                   [ExprNode]
    # unpacked_items         [ExprNode] or None
    # coerced_unpacked_items [ExprNode] or None

    subexprs = ['args']

    is_sequence_constructor = 1
    unpacked_items = None

    def analyse_target_declaration(self, env):
        for arg in self.args:
            arg.analyse_target_declaration(env)

    def analyse_types(self, env):
        # As a constructor: every element becomes a Python object and
        # the sequence itself is a new Python object in a temporary.
        for i in range(len(self.args)):
            arg = self.args[i]
            arg.analyse_types(env)
            self.args[i] = arg.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1

    def analyse_target_types(self, env):
        # As an unpacking target: each element is first fetched into a
        # Python temp, then coerced to the target's own type.
        self.unpacked_items = [] # PyTempNode(self.pos, env)
        self.coerced_unpacked_items = []
        for arg in self.args:
            arg.analyse_target_types(env)
            #node = CloneNode(self.unpacked_item)
            unpacked_item = PyTempNode(self.pos, env)
            coerced_unpacked_item = unpacked_item.coerce_to(arg.type, env)
            self.unpacked_items.append(unpacked_item)
            self.coerced_unpacked_items.append(coerced_unpacked_item)
        self.type = py_object_type
        env.use_utility_code(unpacking_utility_code)

    def allocate_target_temps(self, env):
        for arg in self.args:
            arg.allocate_target_temps(env)
        for node in self.coerced_unpacked_items:
            node.allocate_temps(env)

    def release_target_temp(self, env):
        for arg in self.args:
            arg.release_target_temp(env)
        for node in self.coerced_unpacked_items:
            node.release_temp(env)

    def generate_result_code(self, code):
        self.generate_operation_code(code)

    def generate_assignment_code(self, rhs, code):
        # Unpack the rhs item by item with __Pyx_UnpackItem and assign
        # each value to the corresponding target, then call
        # __Pyx_EndUnpack (presumably to check the rhs length — see
        # unpacking_utility_code).
        for i in range(len(self.args)):
            item = self.unpacked_items[i]
            unpack_code = "__Pyx_UnpackItem(%s, %s)" % (
                rhs.py_result(),
                i)
            code.putln(
                "%s = %s; if (!%s) %s" % (
                    item.result_code,
                    typecast(item.ctype(), py_object_type, unpack_code),
                    item.result_code,
                    code.error_goto(self.pos)))
            value_node = self.coerced_unpacked_items[i]
            value_node.generate_evaluation_code(code)
            self.args[i].generate_assignment_code(value_node, code)
        code.putln(
            "if (__Pyx_EndUnpack(%s, %s) < 0) %s" % (
                rhs.py_result(),
                len(self.args),
                code.error_goto(self.pos)))
        if debug_disposal_code:
            print "UnpackNode.generate_assignment_code:"
            print "...generating disposal code for", rhs
        rhs.generate_disposal_code(code)
+
+
class TupleNode(SequenceNode):
    # Tuple constructor.

    def generate_operation_code(self, code):
        # Allocate the tuple, then move each argument into its slot.
        code.putln(
            "%s = PyTuple_New(%s); if (!%s) %s" % (
                self.result_code,
                len(self.args),
                self.result_code,
                code.error_goto(self.pos)))
        slot = 0
        for arg in self.args:
            if not arg.result_in_temp():
                # PyTuple_SET_ITEM steals a reference, so a value not
                # held in a temporary needs an extra incref first.
                code.put_incref(arg.result_code, arg.ctype())
            code.putln(
                "PyTuple_SET_ITEM(%s, %s, %s);" % (
                    self.result_code,
                    slot,
                    arg.py_result()))
            slot = slot + 1

    def generate_subexpr_disposal_code(self, code):
        # We call generate_post_assignment_code here instead
        # of generate_disposal_code, because values were stored
        # in the tuple using a reference-stealing operation.
        for arg in self.args:
            arg.generate_post_assignment_code(code)
+
+
class ListNode(SequenceNode):
    # List constructor.

    def generate_operation_code(self, code):
        # Allocate the list, then move each argument into its slot.
        code.putln("%s = PyList_New(%s); if (!%s) %s" %
            (self.result_code,
                len(self.args),
                self.result_code,
                code.error_goto(self.pos)))
        slot = 0
        for arg in self.args:
            if not arg.result_in_temp():
                # PyList_SET_ITEM steals a reference, so a value not
                # held in a temporary needs an extra incref first.
                code.put_incref(arg.result_code, arg.ctype())
            code.putln("PyList_SET_ITEM(%s, %s, %s);" %
                (self.result_code,
                    slot,
                    arg.py_result()))
            slot = slot + 1

    def generate_subexpr_disposal_code(self, code):
        # We call generate_post_assignment_code here instead
        # of generate_disposal_code, because values were stored
        # in the list using a reference-stealing operation.
        for arg in self.args:
            arg.generate_post_assignment_code(code)
+
+
class DictNode(ExprNode):
    # Dictionary constructor.
    #
    # key_value_pairs [(ExprNode, ExprNode)]

    def analyse_types(self, env):
        # Coerce every key and value to a Python object; the
        # result is always a new dict in a temporary.
        new_pairs = []
        for key, value in self.key_value_pairs:
            key.analyse_types(env)
            value.analyse_types(env)
            key = key.coerce_to_pyobject(env)
            value = value.coerce_to_pyobject(env)
            new_pairs.append((key, value))
        self.key_value_pairs = new_pairs
        self.type = py_object_type
        self.is_temp = 1

    def allocate_temps(self, env, result = None):
        # Custom method used here because key-value
        # pairs are evaluated and used one at a time.
        # Each pair's temps are released immediately so they
        # can be reused by the next pair.
        self.allocate_temp(env, result)
        for key, value in self.key_value_pairs:
            key.allocate_temps(env)
            value.allocate_temps(env)
            key.release_temp(env)
            value.release_temp(env)

    def generate_evaluation_code(self, code):
        # Custom method used here because key-value
        # pairs are evaluated and used one at a time.
        code.putln(
            "%s = PyDict_New(); if (!%s) %s" % (
                self.result_code,
                self.result_code,
                code.error_goto(self.pos)))
        for key, value in self.key_value_pairs:
            key.generate_evaluation_code(code)
            value.generate_evaluation_code(code)
            # PyDict_SetItem does not steal references, so each
            # pair is disposed of normally afterwards.
            code.putln(
                "if (PyDict_SetItem(%s, %s, %s) < 0) %s" % (
                    self.result_code,
                    key.py_result(),
                    value.py_result(),
                    code.error_goto(self.pos)))
            key.generate_disposal_code(code)
            value.generate_disposal_code(code)
+
+
class ClassNode(ExprNode):
    # Helper class used in the implementation of Python
    # class definitions. Constructs a class object given
    # a name, tuple of bases and class dictionary.
    #
    # name ExprNode Name of the class
    # bases ExprNode Base class tuple
    # dict ExprNode Class dict (not owned by this node)
    # doc ExprNode or None Doc string
    # module_name string Name of defining module

    # 'dict' is deliberately not listed: this node borrows the
    # dict's result and must not evaluate or dispose of it.
    subexprs = ['name', 'bases', 'doc']

    def analyse_types(self, env):
        self.name.analyse_types(env)
        self.name = self.name.coerce_to_pyobject(env)
        self.bases.analyse_types(env)
        if self.doc:
            self.doc.analyse_types(env)
            self.doc = self.doc.coerce_to_pyobject(env)
        self.module_name = env.global_scope().qualified_name
        self.type = py_object_type
        self.is_temp = 1
        # (stray trailing semicolon removed)
        env.use_utility_code(create_class_utility_code)

    def generate_result_code(self, code):
        # Install the doc string into the class dict first, then
        # create the class object via the runtime helper.
        if self.doc:
            code.putln(
                'if (PyDict_SetItemString(%s, "__doc__", %s) < 0) %s' % (
                    self.dict.py_result(),
                    self.doc.py_result(),
                    code.error_goto(self.pos)))
        code.putln(
            '%s = __Pyx_CreateClass(%s, %s, %s, "%s"); if (!%s) %s' % (
                self.result_code,
                self.bases.py_result(),
                self.dict.py_result(),
                self.name.py_result(),
                self.module_name,
                self.result_code,
                code.error_goto(self.pos)))
+
+
class UnboundMethodNode(ExprNode):
    # Helper class used in the implementation of Python
    # class definitions. Builds an unbound method object
    # that wraps a function for a given class.
    #
    # class_cname string C var holding the class object
    # function ExprNode Function object

    subexprs = ['function']

    def analyse_types(self, env):
        self.function.analyse_types(env)
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        # PyMethod_New(func, NULL, class) yields an unbound
        # method; failure is signalled by a NULL result.
        stmt = "%s = PyMethod_New(%s, 0, %s); if (!%s) %s" % (
            self.result_code,
            self.function.py_result(),
            self.class_cname,
            self.result_code,
            code.error_goto(self.pos))
        code.putln(stmt)
+
+
class PyCFunctionNode(AtomicExprNode):
    # Helper class used in the implementation of Python
    # class definitions. Wraps a PyMethodDef struct in a
    # PyCFunction object.
    #
    # pymethdef_cname string PyMethodDef structure

    def analyse_types(self, env):
        self.type = py_object_type
        self.is_temp = 1

    def generate_result_code(self, code):
        # The second argument (self) is NULL: the function is
        # not bound to any object yet.
        stmt = "%s = PyCFunction_New(&%s, 0); if (!%s) %s" % (
            self.result_code,
            self.pymethdef_cname,
            self.result_code,
            code.error_goto(self.pos))
        code.putln(stmt)
+
+#-------------------------------------------------------------------
+#
+# Unary operator nodes
+#
+#-------------------------------------------------------------------
+
class UnopNode(ExprNode):
    # Abstract base class for unary operator nodes.
    #
    # operator string
    # operand ExprNode
    #
    # Processing during analyse_expressions phase:
    #
    # analyse_c_operation
    # Called when the operand is not a pyobject.
    # - Check operand type and coerce if needed.
    # - Determine result type and result code fragment.
    # - Allocate temporary for result if needed.

    subexprs = ['operand']

    def analyse_types(self, env):
        self.operand.analyse_types(env)
        if self.is_py_operation():
            # Python operation: result is a new reference held
            # in a temporary.
            self.coerce_operand_to_pyobject(env)
            self.type = py_object_type
            self.is_temp = 1
        else:
            # C operation: the subclass determines the type.
            self.analyse_c_operation(env)

    def check_const(self):
        self.operand.check_const()

    def is_py_operation(self):
        return self.operand.type.is_pyobject

    def coerce_operand_to_pyobject(self, env):
        self.operand = self.operand.coerce_to_pyobject(env)

    def generate_result_code(self, code):
        if self.operand.type.is_pyobject:
            self.generate_py_operation_code(code)
        else:
            if self.is_temp:
                self.generate_c_operation_code(code)

    def generate_py_operation_code(self, code):
        # Call the PyNumber_* function named by the subclass;
        # a NULL result signals a Python exception.
        function = self.py_operation_function()
        code.putln(
            "%s = %s(%s); if (!%s) %s" % (
                self.result_code,
                function,
                self.operand.py_result(),
                self.result_code,
                code.error_goto(self.pos)))

    def type_error(self):
        # Report an invalid operand type, unless the operand is
        # already erroneous (avoids cascading messages).
        if not self.operand.type.is_error:
            error(self.pos, "Invalid operand type for '%s' (%s)" %
                (self.operator, self.operand.type))
        self.type = PyrexTypes.error_type
+
+
class NotNode(ExprNode):
    # 'not' operator
    #
    # operand ExprNode

    subexprs = ['operand']

    def analyse_types(self, env):
        # The operand is forced to a C boolean, so the result
        # is always a plain C int whatever the operand was.
        operand = self.operand
        operand.analyse_types(env)
        self.operand = operand.coerce_to_boolean(env)
        self.type = PyrexTypes.c_int_type

    def calculate_result_code(self):
        return "(!" + self.operand.result_code + ")"

    def generate_result_code(self, code):
        # The result is computed inline by calculate_result_code.
        pass
+
+
class UnaryPlusNode(UnopNode):
    # unary '+' operator

    operator = '+'

    def analyse_c_operation(self, env):
        # Unary plus is a no-op on C values: the operand's
        # type and result are reused as-is.
        self.type = self.operand.type

    def calculate_result_code(self):
        return self.operand.result_code

    def py_operation_function(self):
        return "PyNumber_Positive"
+
+
class UnaryMinusNode(UnopNode):
    # unary '-' operator

    operator = '-'

    def analyse_c_operation(self, env):
        # Negation is only defined for numeric C types.
        if not self.operand.type.is_numeric:
            self.type_error()
        else:
            self.type = self.operand.type

    def calculate_result_code(self):
        return "(-" + self.operand.result_code + ")"

    def py_operation_function(self):
        return "PyNumber_Negative"
+
+
class TildeNode(UnopNode):
    # unary '~' operator (bitwise complement)

    # Declared for consistency with UnaryPlusNode and
    # UnaryMinusNode, and so type_error() can always read
    # self.operator even if the node is constructed without
    # an explicit 'operator' keyword argument.
    operator = '~'

    def analyse_c_operation(self, env):
        # Bitwise complement only makes sense for integer
        # C types.
        if self.operand.type.is_int:
            self.type = self.operand.type
        else:
            self.type_error()

    def py_operation_function(self):
        return "PyNumber_Invert"

    def calculate_result_code(self):
        return "(~%s)" % self.operand.result_code
+
+
class AmpersandNode(ExprNode):
    # The C address-of operator.
    #
    # operand ExprNode

    subexprs = ['operand']

    def analyse_types(self, env):
        # Only lvalues (or C function names) have an address,
        # and Python objects never expose theirs.
        op = self.operand
        op.analyse_types(env)
        argtype = op.type
        if not (argtype.is_cfunction or op.is_lvalue()):
            self.error("Taking address of non-lvalue")
            return
        if argtype.is_pyobject:
            self.error("Cannot take address of Python variable")
            return
        self.type = PyrexTypes.c_ptr_type(argtype)

    def check_const(self):
        self.operand.check_const_addr()

    def error(self, mess):
        # Record the error and poison this node so later
        # phases don't trip over a missing type or result.
        error(self.pos, mess)
        self.type = PyrexTypes.error_type
        self.result_code = "<error>"

    def calculate_result_code(self):
        return "(&" + self.operand.result_code + ")"

    def generate_result_code(self, code):
        pass
+
+
# Maps each unary operator symbol to the node class that
# implements it.
unop_node_classes = {
    "+": UnaryPlusNode,
    "-": UnaryMinusNode,
    "~": TildeNode,
}
+
def unop_node(pos, operator, operand):
    # Construct unop node of appropriate class for
    # given operator.
    return unop_node_classes[operator](pos,
        operator = operator,
        operand = operand)
+
+
class TypecastNode(ExprNode):
    # C type cast
    #
    # base_type CBaseTypeNode
    # declarator CDeclaratorNode
    # operand ExprNode

    subexprs = ['operand']

    def analyse_types(self, env):
        base_type = self.base_type.analyse(env)
        _, self.type = self.declarator.analyse(base_type, env)
        self.operand.analyse_types(env)
        to_py = self.type.is_pyobject
        from_py = self.operand.type.is_pyobject
        if from_py and not to_py and self.operand.is_ephemeral():
            # The object could be freed while the cast result
            # is still in use.
            error(self.pos, "Casting temporary Python object to non-Python type")
        if to_py and not from_py:
            # Casting a C value to a Python object produces a
            # new reference, which needs a temp to own it.
            self.result_ctype = py_object_type
            self.is_temp = 1

    def check_const(self):
        self.operand.check_const()

    def calculate_result_code(self):
        opnd = self.operand
        result_code = self.type.cast_code(opnd.result_code)
        return result_code

    def result_as(self, type):
        if self.type.is_pyobject and not self.is_temp:
            # Optimise away some unnecessary casting
            return self.operand.result_as(type)
        else:
            return ExprNode.result_as(self, type)

    def generate_result_code(self, code):
        if self.is_temp:
            code.putln(
                "%s = (PyObject *)%s;" % (
                    self.result_code,
                    self.operand.result_code))
            code.put_incref(self.result_code, self.ctype())
+
+
class SizeofNode(ExprNode):
    # Abstract base class for sizeof(x) expression nodes.
    # sizeof() is resolved entirely by the C compiler, so
    # there is nothing to const-check or generate at run time.

    def check_const(self):
        pass

    def generate_result_code(self, code):
        pass
+
+
class SizeofTypeNode(SizeofNode):
    # C sizeof function applied to a type
    #
    # base_type CBaseTypeNode
    # declarator CDeclaratorNode

    subexprs = []

    def analyse_types(self, env):
        # Resolve the declared type and reject types whose
        # size is meaningless or unknown to the C compiler.
        base = self.base_type.analyse(env)
        _, arg_type = self.declarator.analyse(base, env)
        self.arg_type = arg_type
        if arg_type.is_pyobject:
            error(self.pos, "Cannot take sizeof Python object")
        elif arg_type.is_void:
            error(self.pos, "Cannot take sizeof void")
        elif not arg_type.is_complete():
            error(self.pos, "Cannot take sizeof incomplete type '%s'" % arg_type)
        self.type = PyrexTypes.c_int_type

    def calculate_result_code(self):
        return "(sizeof(%s))" % self.arg_type.declaration_code("")
+
+
class SizeofVarNode(SizeofNode):
    # C sizeof function applied to a variable
    #
    # operand ExprNode

    subexprs = ['operand']

    def analyse_types(self, env):
        # Only the operand's type matters; sizeof never
        # evaluates its operand at run time.
        self.operand.analyse_types(env)
        self.type = PyrexTypes.c_int_type

    def calculate_result_code(self):
        return "(sizeof(" + self.operand.result_code + "))"

    def generate_result_code(self, code):
        pass
+
+
+#-------------------------------------------------------------------
+#
+# Binary operator nodes
+#
+#-------------------------------------------------------------------
+
class BinopNode(ExprNode):
    # Abstract base class for binary operator nodes.
    #
    # operator string
    # operand1 ExprNode
    # operand2 ExprNode
    #
    # Processing during analyse_expressions phase:
    #
    # analyse_c_operation
    # Called when neither operand is a pyobject.
    # - Check operand types and coerce if needed.
    # - Determine result type and result code fragment.
    # - Allocate temporary for result if needed.

    subexprs = ['operand1', 'operand2']

    def analyse_types(self, env):
        self.operand1.analyse_types(env)
        self.operand2.analyse_types(env)
        if self.is_py_operation():
            # Python operation: both operands become Python
            # objects and the result is a new reference.
            self.coerce_operands_to_pyobjects(env)
            self.type = py_object_type
            self.is_temp = 1
        else:
            self.analyse_c_operation(env)

    def is_py_operation(self):
        return (self.operand1.type.is_pyobject
            or self.operand2.type.is_pyobject)

    def coerce_operands_to_pyobjects(self, env):
        self.operand1 = self.operand1.coerce_to_pyobject(env)
        self.operand2 = self.operand2.coerce_to_pyobject(env)

    def check_const(self):
        self.operand1.check_const()
        self.operand2.check_const()

    def generate_result_code(self, code):
        #print "BinopNode.generate_result_code:", self.operand1, self.operand2 ###
        if self.operand1.type.is_pyobject:
            function = self.py_operation_function()
            if function == "PyNumber_Power":
                # PyNumber_Power takes a third (modulo) argument,
                # which is Py_None for plain exponentiation.
                extra_args = ", Py_None"
            else:
                extra_args = ""
            code.putln(
                "%s = %s(%s, %s%s); if (!%s) %s" % (
                    self.result_code,
                    function,
                    self.operand1.py_result(),
                    self.operand2.py_result(),
                    extra_args,
                    self.result_code,
                    code.error_goto(self.pos)))
        else:
            if self.is_temp:
                self.generate_c_operation_code(code)

    def type_error(self):
        # Report invalid operands, suppressing the message when
        # either operand is already erroneous.
        if not (self.operand1.type.is_error
                or self.operand2.type.is_error):
            error(self.pos, "Invalid operand types for '%s' (%s; %s)" %
                (self.operator, self.operand1.type,
                    self.operand2.type))
        self.type = PyrexTypes.error_type
+
+
class NumBinopNode(BinopNode):
    # Binary operation taking numeric arguments.

    def analyse_c_operation(self, env):
        type1 = self.operand1.type
        type2 = self.operand2.type
        self.type = self.compute_c_result_type(type1, type2)
        if not self.type:
            self.type_error()

    def compute_c_result_type(self, type1, type2):
        # Result follows C arithmetic promotion: the widest of
        # the two numeric types. Returns None if the operand
        # types are unacceptable.
        if self.c_types_okay(type1, type2):
            return PyrexTypes.widest_numeric_type(type1, type2)
        else:
            return None

    def c_types_okay(self, type1, type2):
        return type1.is_numeric and type2.is_numeric

    def calculate_result_code(self):
        return "(%s %s %s)" % (
            self.operand1.result_code,
            self.operator,
            self.operand2.result_code)

    def py_operation_function(self):
        return self.py_functions[self.operator]

    # Python C API function implementing each operator when
    # performed as a Python operation.
    py_functions = {
        "|": "PyNumber_Or",
        "^": "PyNumber_Xor",
        "&": "PyNumber_And",
        "<<": "PyNumber_Lshift",
        ">>": "PyNumber_Rshift",
        "+": "PyNumber_Add",
        "-": "PyNumber_Subtract",
        "*": "PyNumber_Multiply",
        "/": "PyNumber_Divide",
        "%": "PyNumber_Remainder",
        "**": "PyNumber_Power"
    }
+
+
class IntBinopNode(NumBinopNode):
    # Binary operation restricted to integer arguments.

    def c_types_okay(self, type1, type2):
        # Both operands must be C integer types.
        return type1.is_int and type2.is_int
+
+
class AddNode(NumBinopNode):
    # '+' operator.

    def is_py_operation(self):
        # Concatenating two C strings is delegated to Python.
        t1 = self.operand1.type
        t2 = self.operand2.type
        if t1.is_string and t2.is_string:
            return 1
        return NumBinopNode.is_py_operation(self)

    def compute_c_result_type(self, type1, type2):
        # Pointer + integer (in either order) is C pointer
        # arithmetic and keeps the pointer type.
        if type1.is_ptr and type2.is_int:
            return type1
        if type1.is_int and type2.is_ptr:
            return type2
        return NumBinopNode.compute_c_result_type(
            self, type1, type2)
+
+
class SubNode(NumBinopNode):
    # '-' operator.

    def compute_c_result_type(self, type1, type2):
        # Pointer - integer keeps the pointer type;
        # pointer - pointer yields an integer difference.
        if type1.is_ptr:
            if type2.is_int:
                return type1
            if type2.is_ptr:
                return PyrexTypes.c_int_type
        return NumBinopNode.compute_c_result_type(
            self, type1, type2)
+
+
class MulNode(NumBinopNode):
    # '*' operator.

    def is_py_operation(self):
        # Sequence repetition (string * int, or int * string)
        # must be performed as a Python operation.
        t1 = self.operand1.type
        t2 = self.operand2.type
        if t1.is_string and t2.is_int:
            return 1
        if t2.is_string and t1.is_int:
            return 1
        return NumBinopNode.is_py_operation(self)
+
+
class ModNode(IntBinopNode):
    # '%' operator.

    def is_py_operation(self):
        # '%' with a string operand is Python string formatting.
        if self.operand1.type.is_string:
            return 1
        if self.operand2.type.is_string:
            return 1
        return IntBinopNode.is_py_operation(self)
+
+
class PowNode(NumBinopNode):
    # '**' operator.

    def analyse_types(self, env):
        # Record that pow() is needed so the generated module
        # gets the necessary C support.
        env.pow_function_used = 1
        NumBinopNode.analyse_types(self, env)

    def compute_c_result_type(self, type1, type2):
        # C-level exponentiation always goes through pow(),
        # which operates in double precision.
        if not self.c_types_okay(type1, type2):
            return None
        return PyrexTypes.c_double_type

    def calculate_result_code(self):
        return "pow(%s, %s)" % (
            self.operand1.result_code, self.operand2.result_code)
+
+
class BoolBinopNode(ExprNode):
    # Short-circuiting boolean operation ('and' / 'or').
    #
    # operator string
    # operand1 ExprNode
    # operand2 ExprNode
    # temp_bool ExprNode used internally

    temp_bool = None

    subexprs = ['operand1', 'operand2', 'temp_bool']

    def analyse_types(self, env):
        self.operand1.analyse_types(env)
        self.operand2.analyse_types(env)
        if self.operand1.type.is_pyobject or \
                self.operand2.type.is_pyobject:
            # Python semantics: result is one of the operand
            # objects; an extra C int temp holds the truth test.
            self.operand1 = self.operand1.coerce_to_pyobject(env)
            self.operand2 = self.operand2.coerce_to_pyobject(env)
            self.temp_bool = TempNode(self.pos,
                PyrexTypes.c_int_type, env)
            self.type = py_object_type
        else:
            self.operand1 = self.operand1.coerce_to_boolean(env)
            self.operand2 = self.operand2.coerce_to_boolean(env)
            self.type = PyrexTypes.c_int_type
        # For what we're about to do, it's vital that
        # both operands be temp nodes.
        self.operand1 = self.operand1.coerce_to_temp(env) #CTT
        self.operand2 = self.operand2.coerce_to_temp(env)
        # coerce_to_simple does not seem to be sufficient
        #self.operand1 = self.operand1.coerce_to_simple(env)
        #self.operand2 = self.operand2.coerce_to_simple(env)
        self.is_temp = 1

    def allocate_temps(self, env, result_code = None):
        # We don't need both operands at the same time, and
        # one of the operands will also be our result. So we
        # use an allocation strategy here which results in
        # this node and both its operands sharing the same
        # result variable. This allows us to avoid some
        # assignments and increfs/decrefs that would otherwise
        # be necessary.
        self.allocate_temp(env, result_code)
        self.operand1.allocate_temps(env, self.result_code)
        if self.temp_bool:
            self.temp_bool.allocate_temp(env)
            self.temp_bool.release_temp(env)
        self.operand2.allocate_temps(env, self.result_code)
        # We haven't called release_temp on either operand,
        # because although they are temp nodes, they don't own
        # their result variable. And because they are temp
        # nodes, any temps in their subnodes will have been
        # released before their allocate_temps returned.
        # Therefore, they contain no temp vars that need to
        # be released.

    def check_const(self):
        self.operand1.check_const()
        self.operand2.check_const()

    def calculate_result_code(self):
        return "(%s %s %s)" % (
            self.operand1.result_code,
            self.py_to_c_op[self.operator],
            self.operand2.result_code)

    # C operator corresponding to each Python boolean operator.
    py_to_c_op = {'and': "&&", 'or': "||"}

    def generate_evaluation_code(self, code):
        # Evaluate operand1; only if the short-circuit condition
        # does not apply is operand2 evaluated (into the shared
        # result variable).
        self.operand1.generate_evaluation_code(code)
        test_result = self.generate_operand1_test(code)
        if self.operator == 'and':
            sense = ""
        else:
            sense = "!"
        code.putln(
            "if (%s%s) {" % (
                sense,
                test_result))
        self.operand1.generate_disposal_code(code)
        self.operand2.generate_evaluation_code(code)
        code.putln(
            "}")

    def generate_operand1_test(self, code):
        # Generate code to test the truth of the first operand.
        if self.type.is_pyobject:
            # PyObject_IsTrue returns -1 on error.
            test_result = self.temp_bool.result_code
            code.putln(
                "%s = PyObject_IsTrue(%s); if (%s < 0) %s" % (
                    test_result,
                    self.operand1.py_result(),
                    test_result,
                    code.error_goto(self.pos)))
        else:
            test_result = self.operand1.result_code
        return test_result
+
+
class CmpNode:
    # Mixin class containing code common to PrimaryCmpNodes
    # and CascadedCmpNodes.

    def is_python_comparison(self):
        # 'in'/'not in' are spelled 'in'/'not_in' internally.
        return (self.has_python_operands()
            or (self.cascade and self.cascade.is_python_comparison())
            or self.operator in ('in', 'not_in'))

    def check_types(self, env, operand1, op, operand2):
        if not self.types_okay(operand1, op, operand2):
            error(self.pos, "Invalid types for '%s' (%s, %s)" %
                (self.operator, operand1.type, operand2.type))

    def types_okay(self, operand1, op, operand2):
        type1 = operand1.type
        type2 = operand2.type
        if type1.is_error or type2.is_error:
            # Don't pile further errors onto an erroneous operand.
            return 1
        if type1.is_pyobject: # type2 will be, too
            return 1
        elif type1.is_ptr:
            # Pointers compare against a null pointer or a
            # pointer of the same type.
            return type1.is_null_ptr or type2.is_null_ptr \
                or type1.same_as(type2)
        elif (type1.is_numeric and type2.is_numeric
                and op not in ('is', 'is_not')):
            return 1
        else:
            return 0

    def generate_operation_code(self, code, result_code,
            operand1, op , operand2):
        if op == 'in' or op == 'not_in':
            # Membership test; note the operand order of
            # PySequence_Contains is (container, item).
            code.putln(
                "%s = PySequence_Contains(%s, %s); if (%s < 0) %s" % (
                    result_code,
                    operand2.py_result(),
                    operand1.py_result(),
                    result_code,
                    code.error_goto(self.pos)))
            if op == 'not_in':
                code.putln(
                    "%s = !%s;" % (
                        result_code, result_code))
        elif (operand1.type.is_pyobject
                and op not in ('is', 'is_not')):
            # General Python comparison: get the three-way cmp
            # result, then compare it against zero with the
            # original operator.
            code.putln(
                "if (PyObject_Cmp(%s, %s, &%s) < 0) %s" % (
                    operand1.py_result(),
                    operand2.py_result(),
                    result_code,
                    code.error_goto(self.pos)))
            code.putln(
                "%s = %s %s 0;" % (
                    result_code, result_code, op))
        else:
            # Plain C comparison (also used for 'is'/'is_not',
            # which become pointer equality tests).
            code.putln("%s = %s %s %s;" % (
                result_code,
                operand1.result_code,
                self.c_operator(op),
                operand2.result_code))

    def c_operator(self, op):
        if op == 'is':
            return "=="
        elif op == 'is_not':
            return "!="
        else:
            return op
+
+
class PrimaryCmpNode(ExprNode, CmpNode):
    # Non-cascaded comparison or first comparison of
    # a cascaded sequence.
    #
    # operator string
    # operand1 ExprNode
    # operand2 ExprNode
    # cascade CascadedCmpNode

    # We don't use the subexprs mechanism, because
    # things here are too complicated for it to handle.
    # Instead, we override all the framework methods
    # which use it.

    cascade = None

    def analyse_types(self, env):
        self.operand1.analyse_types(env)
        self.operand2.analyse_types(env)
        if self.cascade:
            self.cascade.analyse_types(env, self.operand2)
        self.is_pycmp = self.is_python_comparison()
        if self.is_pycmp:
            self.coerce_operands_to_pyobjects(env)
        if self.cascade:
            # operand2 is shared with the cascade, so it must be
            # safe to reference more than once.
            #self.operand2 = self.operand2.coerce_to_temp(env) #CTT
            self.operand2 = self.operand2.coerce_to_simple(env)
            self.cascade.coerce_cascaded_operands_to_temp(env)
        self.check_operand_types(env)
        # The comparison result is always a C int; it only needs
        # a temp when computed by generated statements.
        self.type = PyrexTypes.c_int_type
        if self.is_pycmp or self.cascade:
            self.is_temp = 1

    def check_operand_types(self, env):
        self.check_types(env,
            self.operand1, self.operator, self.operand2)
        if self.cascade:
            self.cascade.check_operand_types(env, self.operand2)

    def has_python_operands(self):
        return (self.operand1.type.is_pyobject
            or self.operand2.type.is_pyobject)

    def coerce_operands_to_pyobjects(self, env):
        self.operand1 = self.operand1.coerce_to_pyobject(env)
        self.operand2 = self.operand2.coerce_to_pyobject(env)
        if self.cascade:
            self.cascade.coerce_operands_to_pyobjects(env)

    def allocate_subexpr_temps(self, env):
        self.operand1.allocate_temps(env)
        self.operand2.allocate_temps(env)
        if self.cascade:
            self.cascade.allocate_subexpr_temps(env)

    def release_subexpr_temps(self, env):
        self.operand1.release_temp(env)
        self.operand2.release_temp(env)
        if self.cascade:
            self.cascade.release_subexpr_temps(env)

    def check_const(self):
        self.operand1.check_const()
        self.operand2.check_const()
        if self.cascade:
            # Cascaded comparisons are never compile-time consts.
            self.not_const()

    def calculate_result_code(self):
        return "(%s %s %s)" % (
            self.operand1.result_code,
            self.c_operator(self.operator),
            self.operand2.result_code)

    def generate_evaluation_code(self, code):
        self.operand1.generate_evaluation_code(code)
        self.operand2.generate_evaluation_code(code)
        if self.is_temp:
            self.generate_operation_code(code, self.result_code,
                self.operand1, self.operator, self.operand2)
            if self.cascade:
                self.cascade.generate_evaluation_code(code,
                    self.result_code, self.operand2)
            # Non-temp case leaves disposal to the caller via
            # generate_subexpr_disposal_code.
            self.operand1.generate_disposal_code(code)
            self.operand2.generate_disposal_code(code)

    def generate_subexpr_disposal_code(self, code):
        # If this is called, it is a non-cascaded cmp,
        # so only need to dispose of the two main operands.
        self.operand1.generate_disposal_code(code)
        self.operand2.generate_disposal_code(code)
+
+
class CascadedCmpNode(Node, CmpNode):
    # A CascadedCmpNode is not a complete expression node. It
    # hangs off the side of another comparison node, shares
    # its left operand with that node, and shares its result
    # with the PrimaryCmpNode at the head of the chain.
    #
    # operator string
    # operand2 ExprNode
    # cascade CascadedCmpNode

    cascade = None

    def analyse_types(self, env, operand1):
        self.operand2.analyse_types(env)
        if self.cascade:
            self.cascade.analyse_types(env, self.operand2)

    def check_operand_types(self, env, operand1):
        self.check_types(env,
            operand1, self.operator, self.operand2)
        if self.cascade:
            self.cascade.check_operand_types(env, self.operand2)

    def has_python_operands(self):
        return self.operand2.type.is_pyobject

    def coerce_operands_to_pyobjects(self, env):
        self.operand2 = self.operand2.coerce_to_pyobject(env)
        if self.cascade:
            self.cascade.coerce_operands_to_pyobjects(env)

    def coerce_cascaded_operands_to_temp(self, env):
        if self.cascade:
            # operand2 is shared with the next link, so it must
            # be safe to reference more than once.
            #self.operand2 = self.operand2.coerce_to_temp(env) #CTT
            self.operand2 = self.operand2.coerce_to_simple(env)
            self.cascade.coerce_cascaded_operands_to_temp(env)

    def allocate_subexpr_temps(self, env):
        self.operand2.allocate_temps(env)
        if self.cascade:
            self.cascade.allocate_subexpr_temps(env)

    def release_subexpr_temps(self, env):
        self.operand2.release_temp(env)
        if self.cascade:
            self.cascade.release_subexpr_temps(env)

    def generate_evaluation_code(self, code, result, operand1):
        # Short-circuit: only evaluate this link if the chain
        # result so far is true.
        code.putln("if (%s) {" % result)
        self.operand2.generate_evaluation_code(code)
        self.generate_operation_code(code, result,
            operand1, self.operator, self.operand2)
        if self.cascade:
            self.cascade.generate_evaluation_code(
                code, result, self.operand2)
        # Cascaded cmp result is always temp
        self.operand2.generate_disposal_code(code)
        code.putln("}")
+
+
# Maps each binary operator symbol to the node class that
# implements it.
binop_node_classes = {
    "or": BoolBinopNode,
    "and": BoolBinopNode,
    "|": IntBinopNode,
    "^": IntBinopNode,
    "&": IntBinopNode,
    "<<": IntBinopNode,
    ">>": IntBinopNode,
    "+": AddNode,
    "-": SubNode,
    "*": MulNode,
    "/": NumBinopNode,
    "%": ModNode,
    "**": PowNode
}
+
def binop_node(pos, operator, operand1, operand2):
    # Construct a binop node of the class appropriate for
    # the given operator.
    node_class = binop_node_classes[operator]
    return node_class(pos,
        operator = operator,
        operand1 = operand1,
        operand2 = operand2)
+
+#-------------------------------------------------------------------
+#
+# Coercion nodes
+#
+# Coercion nodes are special in that they are created during
+# the analyse_types phase of parse tree processing.
+# Their __init__ methods consequently incorporate some aspects
+# of that phase.
+#
+#-------------------------------------------------------------------
+
class CoercionNode(ExprNode):
    # Abstract base class for coercion nodes.
    #
    # arg ExprNode node being coerced

    subexprs = ['arg']

    def __init__(self, arg):
        # Coercion nodes are created during type analysis, so
        # they take their position from the node being wrapped.
        self.pos = arg.pos
        self.arg = arg
        if debug_coercion:
            print self, "Coercing", self.arg
+
+
class CastNode(CoercionNode):
    # Wrap a node in a C type cast.

    def __init__(self, arg, new_type):
        CoercionNode.__init__(self, arg)
        self.type = new_type

    def calculate_result_code(self):
        # Delegate to the argument, which knows how to render
        # its own result cast to the requested type.
        return self.arg.result_as(self.type)

    def generate_result_code(self, code):
        self.arg.generate_result_code(code)
+
+
class PyTypeTestNode(CoercionNode):
    # This node is used to check that a generic Python
    # object is an instance of a particular extension type.
    # This node borrows the result of its argument node.

    def __init__(self, arg, dst_type, env):
        # The arg is known to be a Python object, and
        # the dst_type is known to be an extension type.
        assert dst_type.is_extension_type, "PyTypeTest on non extension type"
        CoercionNode.__init__(self, arg)
        self.type = dst_type
        self.result_ctype = arg.ctype()
        env.use_utility_code(type_test_utility_code)

    def result_in_temp(self):
        return self.arg.result_in_temp()

    def is_ephemeral(self):
        return self.arg.is_ephemeral()

    def calculate_result_code(self):
        # Borrows the argument's result directly.
        return self.arg.result_code

    def generate_result_code(self, code):
        if self.type.typeobj_is_available():
            # Emit a run-time type check; __Pyx_TypeTest raises
            # on mismatch and returns false.
            code.putln(
                "if (!__Pyx_TypeTest(%s, %s)) %s" % (
                    self.arg.py_result(),
                    self.type.typeptr_cname,
                    code.error_goto(self.pos)))
        else:
            error(self.pos, "Cannot test type of extern C class "
                "without type object name specification")

    def generate_post_assignment_code(self, code):
        self.arg.generate_post_assignment_code(code)
+
+
class CoerceToPyTypeNode(CoercionNode):
    # This node is used to convert a C data type
    # to a Python object.

    def __init__(self, arg, env):
        CoercionNode.__init__(self, arg)
        self.type = py_object_type
        self.is_temp = 1
        if not arg.type.to_py_function:
            error(arg.pos,
                "Cannot convert '%s' to Python object" % arg.type)

    def generate_result_code(self, code):
        # Call the type's conversion function; a NULL result
        # signals a Python exception.
        convert = self.arg.type.to_py_function
        code.putln('%s = %s(%s); if (!%s) %s' % (
            self.result_code,
            convert,
            self.arg.result_code,
            self.result_code,
            code.error_goto(self.pos)))
+
+
class CoerceFromPyTypeNode(CoercionNode):
    # This node is used to convert a Python object
    # to a C data type.

    def __init__(self, result_type, arg, env):
        CoercionNode.__init__(self, arg)
        self.type = result_type
        self.is_temp = 1
        if not result_type.from_py_function:
            error(arg.pos,
                "Cannot convert Python object to '%s'" % result_type)
        if self.type.is_string and self.arg.is_ephemeral():
            # The char * would point into a Python object that
            # may be freed while the pointer is still in use.
            error(arg.pos,
                "Obtaining char * from temporary Python value")

    def generate_result_code(self, code):
        # Conversion functions signal failure by leaving an
        # exception set, so test PyErr_Occurred rather than the
        # returned value.
        function = self.type.from_py_function
        code.putln('%s = %s(%s); if (PyErr_Occurred()) %s' % (
            self.result_code,
            function,
            self.arg.py_result(),
            code.error_goto(self.pos)))
+
+
class CoerceToBooleanNode(CoercionNode):
    # This node is used when a result needs to be used
    # in a boolean context.

    def __init__(self, arg, env):
        CoercionNode.__init__(self, arg)
        self.type = PyrexTypes.c_int_type
        # A Python object needs a run-time truth test, whose
        # result must live in a temporary.
        if arg.type.is_pyobject:
            self.is_temp = 1

    def check_const(self):
        if self.is_temp:
            self.not_const()
        self.arg.check_const()

    def calculate_result_code(self):
        return "(%s != 0)" % self.arg.result_code

    def generate_result_code(self, code):
        if not self.arg.type.is_pyobject:
            # C values are tested inline via
            # calculate_result_code.
            return
        # PyObject_IsTrue returns -1 on error.
        code.putln(
            "%s = PyObject_IsTrue(%s); if (%s < 0) %s" % (
                self.result_code,
                self.arg.py_result(),
                self.result_code,
                code.error_goto(self.pos)))
+
+
class CoerceToTempNode(CoercionNode):
    # This node is used to force the result of another node
    # to be stored in a temporary. It is only used if the
    # argument node's result is not already in a temporary.

    def __init__(self, arg, env):
        CoercionNode.__init__(self, arg)
        self.type = self.arg.type
        self.is_temp = 1
        if self.type.is_pyobject:
            # A temp holding a Python object always uses the
            # generic PyObject * representation.
            self.result_ctype = py_object_type

    def generate_result_code(self, code):
        #self.arg.generate_evaluation_code(code) # Already done
        # by generic generate_subexpr_evaluation_code!
        code.putln("%s = %s;" % (
            self.result_code, self.arg.result_as(self.ctype())))
        if self.type.is_pyobject:
            # The temp owns a reference of its own.
            code.put_incref(self.result_code, self.ctype())
+
+
class CloneNode(CoercionNode):
    # Borrows the result of another node whose value is needed
    # more than once. The argument node's result must already be
    # in a temporary. This node generates no evaluation or
    # disposal code of its own -- the original owner of the
    # argument node remains responsible for both.

    subexprs = [] # Arg is not considered a subexpr

    def __init__(self, arg):
        CoercionNode.__init__(self, arg)
        self.type = arg.type
        self.result_ctype = arg.result_ctype

    def calculate_result_code(self):
        return self.arg.result_code

    def generate_evaluation_code(self, code):
        pass

    def generate_result_code(self, code):
        pass
+
+#------------------------------------------------------------------------------------
+#
+# Runtime support code
+#
+#------------------------------------------------------------------------------------
+
# __Pyx_GetName, non-interned variant: looks *name* up as an attribute
# of the given object (the builtins module is passed in practice) and
# turns a failed lookup into a NameError.
get_name_utility_code = \
"""
static PyObject *__Pyx_GetName(PyObject *dict, char *name) {
    PyObject *result;
    result = PyObject_GetAttrString(dict, name);
    if (!result)
        PyErr_SetString(PyExc_NameError, name);
    return result;
}
"""

# __Pyx_GetName, interned variant: the name arrives as an (interned)
# Python string object instead of a char *.
get_name_interned_utility_code = \
"""
static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) {
    PyObject *result;
    result = PyObject_GetAttr(dict, name);
    if (!result)
        PyErr_SetObject(PyExc_NameError, name);
    return result;
}
"""

#------------------------------------------------------------------------------------

# Runtime implementation of the import statement: calls the __import__
# builtin with the module's global dict and empty stand-ins for the
# locals dict and (when absent) the from-list. The %(...)s
# placeholders are substituted with generated C names below.
# Note: on success control also falls through the 'bad' label; only
# the borrowed/temporary objects are released there.
import_utility_code = \
"""
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list) {
    PyObject *__import__ = 0;
    PyObject *empty_list = 0;
    PyObject *module = 0;
    PyObject *global_dict = 0;
    PyObject *empty_dict = 0;
    PyObject *list;
    __import__ = PyObject_GetAttrString(%(BUILTINS)s, "__import__");
    if (!__import__)
        goto bad;
    if (from_list)
        list = from_list;
    else {
        empty_list = PyList_New(0);
        if (!empty_list)
            goto bad;
        list = empty_list;
    }
    global_dict = PyModule_GetDict(%(GLOBALS)s);
    if (!global_dict)
        goto bad;
    empty_dict = PyDict_New();
    if (!empty_dict)
        goto bad;
    module = PyObject_CallFunction(__import__, "OOOO",
        name, global_dict, empty_dict, list);
bad:
    Py_XDECREF(empty_list);
    Py_XDECREF(__import__);
    Py_XDECREF(empty_dict);
    return module;
}
""" % {
    "BUILTINS": Naming.builtins_cname,
    "GLOBALS": Naming.module_cname,
}

#------------------------------------------------------------------------------------

# Fetches and normalizes the current exception, stores it in the
# thread state (so sys.exc_info() sees it) and returns a new
# reference to the exception value.
get_exception_utility_code = \
"""
static PyObject *__Pyx_GetExcValue(void) {
    PyObject *type = 0, *value = 0, *tb = 0;
    PyObject *result = 0;
    PyThreadState *tstate = PyThreadState_Get();
    PyErr_Fetch(&type, &value, &tb);
    PyErr_NormalizeException(&type, &value, &tb);
    if (PyErr_Occurred())
        goto bad;
    if (!value) {
        value = Py_None;
        Py_INCREF(value);
    }
    Py_XDECREF(tstate->exc_type);
    Py_XDECREF(tstate->exc_value);
    Py_XDECREF(tstate->exc_traceback);
    tstate->exc_type = type;
    tstate->exc_value = value;
    tstate->exc_traceback = tb;
    result = value;
    Py_XINCREF(result);
    type = 0;
    value = 0;
    tb = 0;
bad:
    Py_XDECREF(type);
    Py_XDECREF(value);
    Py_XDECREF(tb);
    return result;
}
"""

#------------------------------------------------------------------------------------

# Helpers for tuple/list unpacking assignments: __Pyx_UnpackItem
# fetches item i (mapping IndexError to the "wrong size" ValueError),
# and __Pyx_EndUnpack verifies the sequence is exhausted.
unpacking_utility_code = \
"""
static void __Pyx_UnpackError(void) {
    PyErr_SetString(PyExc_ValueError, "unpack sequence of wrong size");
}

static PyObject *__Pyx_UnpackItem(PyObject *seq, int i) {
    PyObject *item;
    if (!(item = PySequence_GetItem(seq, i))) {
        if (PyErr_ExceptionMatches(PyExc_IndexError))
            __Pyx_UnpackError();
    }
    return item;
}

static int __Pyx_EndUnpack(PyObject *seq, int i) {
    PyObject *item;
    if (item = PySequence_GetItem(seq, i)) {
        Py_DECREF(item);
        __Pyx_UnpackError();
        return -1;
    }
    PyErr_Clear();
    return 0;
}
"""

#------------------------------------------------------------------------------------

# Checks that obj is None or an instance of the given extension type;
# raises TypeError (or SystemError if the type object is missing)
# and returns 0 on failure.
type_test_utility_code = \
"""
static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) {
    if (!type) {
        PyErr_Format(PyExc_SystemError, "Missing type object");
        return 0;
    }
    if (obj == Py_None || PyObject_TypeCheck(obj, type))
        return 1;
    PyErr_Format(PyExc_TypeError, "Cannot convert %s to %s",
        obj->ob_type->tp_name, type->tp_name);
    return 0;
}
"""

#------------------------------------------------------------------------------------

# Creates a (classic) Python class object, setting __module__ in its
# dict first so introspection reports the defining module.
create_class_utility_code = \
"""
static PyObject *__Pyx_CreateClass(
    PyObject *bases, PyObject *dict, PyObject *name, char *modname)
{
    PyObject *py_modname;
    PyObject *result = 0;

    py_modname = PyString_FromString(modname);
    if (!py_modname)
        goto bad;
    if (PyDict_SetItemString(dict, "__module__", py_modname) < 0)
        goto bad;
    result = PyClass_New(bases, dict, name);
bad:
    Py_XDECREF(py_modname);
    return result;
}
"""
+
+#------------------------------------------------------------------------------------
--- /dev/null
+#
+# Pyrex Scanner - Lexical Definitions
+#
+# Changing anything in this file will cause Lexicon.pickle
+# to be rebuilt next time pyrexc is run.
+#
+
+string_prefixes = "cCrR"
+
def make_lexicon():
    """Build and return the Plex Lexicon describing Pyrex's tokens."""
    from Pyrex.Plex import \
        Str, Any, AnyBut, AnyChar, Rep, Rep1, Opt, Bol, Eol, Eof, \
        TEXT, IGNORE, State, Lexicon
    from Scanning import Method

    # Basic character classes.
    letter = Any("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
    digit = Any("0123456789")
    octdigit = Any("01234567")
    hexdigit = Any("0123456789ABCDEFabcdef")
    indentation = Bol + Rep(Any(" \t"))

    # Numeric literal components.
    decimal = Rep1(digit)
    dot = Str(".")
    exponent = Any("Ee") + Opt(Any("+-")) + decimal
    decimal_fract = (decimal + dot + Opt(decimal)) | (dot + decimal)

    # Identifiers and numeric literals.
    name = letter + Rep(letter | digit)
    intconst = decimal | (Str("0x") + Rep1(hexdigit))
    longconst = intconst + Str("L")
    fltconst = (decimal_fract + Opt(exponent)) | (decimal + exponent)
    imagconst = (intconst | fltconst) + Any("jJ")

    # One-line string literals (backslash escapes any character).
    sq_string = (
        Str("'") +
        Rep(AnyBut("\\\n'") | (Str("\\") + AnyChar)) +
        Str("'")
    )

    dq_string = (
        Str('"') +
        Rep(AnyBut('\\\n"') | (Str("\\") + AnyChar)) +
        Str('"')
    )

    # Triple-quoted string literals.
    non_sq = AnyBut("'") | (Str('\\') + AnyChar)
    tsq_string = (
        Str("'''")
        + Rep(non_sq | (Str("'") + non_sq) | (Str("''") + non_sq))
        + Str("'''")
    )

    non_dq = AnyBut('"') | (Str('\\') + AnyChar)
    tdq_string = (
        Str('"""')
        + Rep(non_dq | (Str('"') + non_dq) | (Str('""') + non_dq))
        + Str('"""')
    )
    # Complete string literal (currently unused: strings are scanned
    # incrementally via the string states below).
    stringlit = Opt(Any(string_prefixes)) + (sq_string | dq_string | tsq_string| tdq_string)

    # Opening delimiter of a string; the rest is handled by the
    # *_STRING states.
    beginstring = Opt(Any(string_prefixes)) + (Str("'") | Str('"') | Str("'''") | Str('"""'))
    two_oct = octdigit + octdigit
    three_oct = octdigit + octdigit + octdigit
    two_hex = hexdigit + hexdigit
    escapeseq = Str("\\") + (two_oct | three_oct | two_hex | AnyChar)

    # Punctuation, brackets and multi-character operators.
    bra = Any("([{")
    ket = Any(")]}")
    punct = Any(":,;+-*/|&<>=.%`~^?")
    diphthong = Str("==", "<>", "!=", "<=", ">=", "<<", ">>", "**")
    spaces = Rep1(Any(" \t\f"))
    comment = Str("#") + Rep(AnyBut("\n"))
    escaped_newline = Str("\\\n")
    lineterm = Eol + Opt(Str("\n"))

    return Lexicon([
        (name, 'IDENT'),
        (intconst, 'INT'),
        (longconst, 'LONG'),
        (fltconst, 'FLOAT'),
        (imagconst, 'IMAG'),
        (punct | diphthong, TEXT),

        (bra, Method('open_bracket_action')),
        (ket, Method('close_bracket_action')),
        (lineterm, Method('newline_action')),

        #(stringlit, 'STRING'),
        (beginstring, Method('begin_string_action')),

        (comment, IGNORE),
        (spaces, IGNORE),
        (escaped_newline, IGNORE),

        # At the start of each logical line, measure the indentation.
        State('INDENT', [
            (Opt(spaces) + Opt(comment) + lineterm, IGNORE),
            (indentation, Method('indentation_action')),
            (Eof, Method('eof_action'))
        ]),

        # Inside a single-quoted string.
        State('SQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Str('"'), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str("'"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Inside a double-quoted string.
        State('DQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\n\\')), 'CHARS'),
            (Str("'"), 'CHARS'),
            (Str("\n"), Method('unclosed_string_action')),
            (Str('"'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Inside a triple-single-quoted string (newlines allowed).
        State('TSQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut("'\"\n\\")), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str("'''"), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        # Inside a triple-double-quoted string (newlines allowed).
        State('TDQ_STRING', [
            (escapeseq, 'ESCAPE'),
            (Rep1(AnyBut('"\'\n\\')), 'CHARS'),
            (Any("'\""), 'CHARS'),
            (Str("\n"), 'NEWLINE'),
            (Str('"""'), Method('end_string_action')),
            (Eof, 'EOF')
        ]),

        (Eof, Method('eof_action'))
        ],

        # FIXME: Plex 1.9 needs different args here from Plex 1.1.4
        #debug_flags = scanner_debug_flags,
        #debug_file = scanner_dump_file
        )
+
--- /dev/null
+#
+# Pyrex Top Level
+#
+
import os, sys

# Bail out early on interpreters too old to run the compiler.
if sys.version_info[:2] < (2, 2):
    print >>sys.stderr, "Sorry, Pyrex requires Python 2.2 or later"
    sys.exit(1)
+
+import os
+from time import time
+import Version
+from Scanning import PyrexScanner
+import Errors
+from Errors import PyrexError, CompileError, error
+import Parsing
+from Symtab import BuiltinScope, ModuleScope
+import Code
+from Pyrex.Utils import replace_suffix
+
+verbose = 0
+
class Context:
    #  This class encapsulates the context needed for compiling
    #  one or more Pyrex implementation files along with their
    #  associated and imported declaration files. It includes
    #  the root of the module import namespace and the list
    #  of directories to search for include files.
    #
    #  modules               {string : ModuleScope}
    #  include_directories   [string]

    def __init__(self, include_directories):
        self.modules = {"__builtin__" : BuiltinScope()}
        self.include_directories = include_directories

    def find_module(self, module_name,
            relative_to = None, pos = None, need_pxd = 1):
        # Finds and returns the module scope corresponding to
        # the given relative or absolute module name. If this
        # is the first time the module has been requested, finds
        # the corresponding .pxd file and process it.
        # If relative_to is not None, it must be a module scope,
        # and the module will first be searched for relative to
        # that module, provided its name is not a dotted name.
        debug_find_module = 0
        if debug_find_module:
            print "Context.find_module: module_name =", module_name, \
                "relative_to =", relative_to, "pos =", pos, "need_pxd =", need_pxd
        scope = None
        pxd_pathname = None
        if "." not in module_name and relative_to:
            if debug_find_module:
                print "...trying relative import"
            scope = relative_to.lookup_submodule(module_name)
            if not scope:
                qualified_name = relative_to.qualify_name(module_name)
                pxd_pathname = self.find_pxd_file(qualified_name, pos)
                if pxd_pathname:
                    scope = relative_to.find_submodule(module_name)
        if not scope:
            if debug_find_module:
                print "...trying absolute import"
            # Walk the dotted name down from the root (this Context
            # acts as the root scope via find_submodule).
            scope = self
            for name in module_name.split("."):
                scope = scope.find_submodule(name)
        if debug_find_module:
            print "...scope =", scope
        if not scope.pxd_file_loaded:
            if debug_find_module:
                print "...pxd not loaded"
            # Mark loaded up-front so a failed parse is not retried.
            scope.pxd_file_loaded = 1
            if not pxd_pathname:
                if debug_find_module:
                    print "...looking for pxd file"
                pxd_pathname = self.find_pxd_file(module_name, pos)
                if debug_find_module:
                    print "......found ", pxd_pathname
                if not pxd_pathname and need_pxd:
                    error(pos, "'%s.pxd' not found" % module_name)
            if pxd_pathname:
                try:
                    if debug_find_module:
                        print "Context.find_module: Parsing", pxd_pathname
                    pxd_tree = self.parse(pxd_pathname, scope.type_names, pxd = 1)
                    pxd_tree.analyse_declarations(scope)
                except CompileError:
                    # Errors were already reported via the Errors module.
                    pass
        return scope

    def find_pxd_file(self, module_name, pos):
        # Search include directories for the .pxd file
        # corresponding to the given (full) module name.
        pxd_filename = "%s.pxd" % module_name
        return self.search_include_directories(pxd_filename, pos)

    def find_include_file(self, filename, pos):
        # Search list of include directories for filename.
        # Reports an error and returns None if not found.
        path = self.search_include_directories(filename, pos)
        if not path:
            error(pos, "'%s' not found" % filename)
        return path

    def search_include_directories(self, filename, pos):
        # Search the list of include directories for the given
        # file name. If a source file position is given, first
        # searches the directory containing that file. Returns
        # None if not found, but does not report an error.
        dirs = self.include_directories
        if pos:
            here_dir = os.path.dirname(pos[0])
            dirs = [here_dir] + dirs
        for dir in dirs:
            path = os.path.join(dir, filename)
            if os.path.exists(path):
                return path
        return None

    def lookup_submodule(self, name):
        # Look up a top-level module. Returns None if not found.
        return self.modules.get(name, None)

    def find_submodule(self, name):
        # Find a top-level module, creating a new one if needed.
        scope = self.lookup_submodule(name)
        if not scope:
            scope = ModuleScope(name,
                parent_module = None, context = self)
            self.modules[name] = scope
        return scope

    def parse(self, source_filename, type_names, pxd):
        # Parse the given source file and return a parse tree.
        # Raises CompileError if any errors were reported during
        # parsing.
        f = open(source_filename, "rU")
        s = PyrexScanner(f, source_filename,
            type_names = type_names, context = self)
        try:
            tree = Parsing.p_module(s, pxd)
        finally:
            f.close()
        if Errors.num_errors > 0:
            raise CompileError
        return tree

    def extract_module_name(self, path):
        # Get the module name out of a source file pathname.
        _, tail = os.path.split(path)
        name, _ = os.path.splitext(tail)
        return name

    def compile(self, source, options = None):
        # Compile a Pyrex implementation file in this context
        # and return a CompilationResult.
        if not options:
            options = default_options
        result = CompilationResult()
        cwd = os.getcwd()
        source = os.path.join(cwd, source)
        if options.use_listing_file:
            result.listing_file = replace_suffix(source, ".lis")
            Errors.open_listing_file(result.listing_file,
                echo_to_stderr = options.errors_to_stderr)
        else:
            Errors.open_listing_file(None)
        if options.output_file:
            result.c_file = os.path.join(cwd, options.output_file)
        else:
            if options.cplus:
                c_suffix = ".cpp"
            else:
                c_suffix = ".c"
            result.c_file = replace_suffix(source, c_suffix)
        module_name = self.extract_module_name(source)
        initial_pos = (source, 1, 0)
        scope = self.find_module(module_name, pos = initial_pos, need_pxd = 0)
        errors_occurred = False
        try:
            tree = self.parse(source, scope.type_names, pxd = 0)
            tree.process_implementation(scope, result)
        except CompileError:
            errors_occurred = True
        Errors.close_listing_file()
        result.num_errors = Errors.num_errors
        if result.num_errors > 0:
            errors_occurred = True
        if errors_occurred:
            # Remove any partially written C file.
            try:
                os.unlink(result.c_file)
            except EnvironmentError:
                pass
            result.c_file = None
        # Experimental compile/link steps; c_compile/c_link are only
        # set on Mac platforms (see module bottom).
        if result.c_file and not options.c_only and c_compile:
            result.object_file = c_compile(result.c_file,
                verbose_flag = options.show_version,
                cplus = options.cplus)
            if not options.obj_only and c_link:
                result.extension_file = c_link(result.object_file,
                    extra_objects = options.objects,
                    verbose_flag = options.show_version,
                    cplus = options.cplus)
        return result
+
+#------------------------------------------------------------------------
+#
+# Main Python entry point
+#
+#------------------------------------------------------------------------
+
class CompilationOptions:
    """
    Options to the Pyrex compiler:

    show_version      boolean   Display version number
    use_listing_file  boolean   Generate a .lis file
    errors_to_stderr  boolean   Echo errors to stderr when using .lis
    include_path      [string]  Directories to search for include files
    output_file       string    Name of generated .c file

    Following options are experimental and only used on MacOSX:

    c_only            boolean   Stop after generating C file (default)
    obj_only          boolean   Stop after compiling to .o file
    objects           [string]  Extra .o files to link with
    cplus             boolean   Compile as c++ code
    """

    def __init__(self, defaults = None, **kw):
        # Start from empty lists, overlay the defaults object's
        # attributes, then any keyword overrides.
        self.include_path = []
        self.objects = []
        if defaults:
            self.__dict__.update(defaults.__dict__)
        self.__dict__.update(kw)
        # The dict updates above make the list-valued options *share*
        # the defaults' list objects, so appending to this instance's
        # include_path/objects would silently mutate the defaults
        # (e.g. the module-level default_options). Take copies.
        self.include_path = list(self.include_path)
        self.objects = list(self.objects)
+
+
class CompilationResult:
    """
    Results from the Pyrex compiler:

    c_file           string or None   The generated C source file
    h_file           string or None   The generated C header file
    i_file           string or None   The generated .pxi file
    listing_file     string or None   File of error messages
    object_file      string or None   Result of compiling the C file
    extension_file   string or None   Result of linking the object file
    num_errors       integer          Number of compilation errors
    """

    def __init__(self):
        self.c_file = None
        self.h_file = None
        self.i_file = None
        self.listing_file = None
        self.object_file = None
        self.extension_file = None
        # Initialise num_errors so the attribute documented above always
        # exists, even if compilation is aborted before Context.compile
        # gets around to counting errors.
        self.num_errors = 0
+
+
def compile(source, options = None, c_compile = 0, c_link = 0):
    """
    compile(source, options = default_options)

    Compile the given Pyrex implementation file and return
    a CompilationResult object describing what was produced.
    """
    # Work on a private copy of the options so the caller's object
    # (possibly default_options) is left untouched.
    opts = CompilationOptions(defaults = options or default_options)
    if c_compile:
        opts.c_only = 0
    if c_link:
        opts.obj_only = 0
    return Context(opts.include_path).compile(source, opts)
+
+#------------------------------------------------------------------------
+#
+# Main command-line entry point
+#
+#------------------------------------------------------------------------
+
def main(command_line = 0):
    # Top-level entry point. When command_line is true, argv is parsed
    # for pyrexc options; otherwise every argument is taken to be a
    # source file and the default options are used.
    args = sys.argv[1:]
    any_failures = 0
    if command_line:
        from CmdLine import parse_command_line
        options, sources = parse_command_line(args)
    else:
        options = default_options
        sources = args
    if options.show_version:
        print >>sys.stderr, "Pyrex version %s" % Version.version
    context = Context(options.include_path)
    for source in sources:
        try:
            result = context.compile(source, options)
            if result.num_errors > 0:
                any_failures = 1
        except PyrexError, e:
            print >>sys.stderr, e
            any_failures = 1
    # Exit with a nonzero status if any source failed to compile.
    if any_failures:
        sys.exit(1)
+
+#------------------------------------------------------------------------
+#
+# Set the default options depending on the platform
+#
+#------------------------------------------------------------------------
+
# Baseline option set; tweaked per platform below.
default_options = CompilationOptions(
    show_version = 0,
    use_listing_file = 0,
    errors_to_stderr = 1,
    c_only = 1,
    obj_only = 1,
    cplus = 0,
    output_file = None)

# The experimental compile/link steps are only provided by the Mac
# support modules; on other platforms c_compile/c_link stay None and
# compilation stops after the C file is generated.
if sys.platform == "mac":
    from Pyrex.Mac.MacSystem import c_compile, c_link, CCompilerError
    default_options.use_listing_file = 1
elif sys.platform == "darwin":
    from Pyrex.Mac.DarwinSystem import c_compile, c_link, CCompilerError
else:
    c_compile = None
    c_link = None
+
+
--- /dev/null
+#
+# Pyrex - C naming conventions
+#
+#
+# Prefixes for generating C names.
+# Collected here to facilitate ensuring uniqueness.
+#
+
# All generated C names start with this, keeping them out of the way
# of user-declared names.
pyrex_prefix = "__pyx_"

# Prefixes that are combined with a user or generated name.
arg_prefix = pyrex_prefix + "arg_"
funcdoc_prefix = pyrex_prefix + "doc_"
enum_prefix = pyrex_prefix + "e_"
func_prefix = pyrex_prefix + "f_"
gstab_prefix = pyrex_prefix + "getsets_"
prop_get_prefix = pyrex_prefix + "getprop_"
const_prefix = pyrex_prefix + "k"
label_prefix = pyrex_prefix + "L"
pymethdef_prefix = pyrex_prefix + "mdef_"
methtab_prefix = pyrex_prefix + "methods_"
memtab_prefix = pyrex_prefix + "members_"
interned_prefix = pyrex_prefix + "n_"
objstruct_prefix = pyrex_prefix + "obj_"
typeptr_prefix = pyrex_prefix + "ptype_"
prop_set_prefix = pyrex_prefix + "setprop_"
type_prefix = pyrex_prefix + "t_"
typeobj_prefix = pyrex_prefix + "type_"
var_prefix = pyrex_prefix + "v_"
vtable_prefix = pyrex_prefix + "vtable_"
vtabptr_prefix = pyrex_prefix + "vtabptr_"
vtabstruct_prefix = pyrex_prefix + "vtabstruct_"

# Complete C names of fixed module-level objects and variables.
args_cname = pyrex_prefix + "args"
kwdlist_cname = pyrex_prefix + "argnames"
obj_base_cname = pyrex_prefix + "base"
builtins_cname = pyrex_prefix + "b"
moddict_cname = pyrex_prefix + "d"
dummy_cname = pyrex_prefix + "dummy"
filename_cname = pyrex_prefix + "filename"
filetable_cname = pyrex_prefix + "f"
filenames_cname = pyrex_prefix + "filenames"
fileinit_cname = pyrex_prefix + "init_filenames"
intern_tab_cname = pyrex_prefix + "intern_tab"
kwds_cname = pyrex_prefix + "kwds"
lineno_cname = pyrex_prefix + "lineno"
module_cname = pyrex_prefix + "m"
moddoc_cname = pyrex_prefix + "mdoc"
methtable_cname = pyrex_prefix + "methods"
retval_cname = pyrex_prefix + "r"
self_cname = pyrex_prefix + "self"
stringtab_cname = pyrex_prefix + "string_tab"
vtabslot_cname = pyrex_prefix + "vtab"

# Macro that expands to extern "C" under C++, plain extern under C.
extern_c_macro = pyrex_prefix.upper() + "EXTERN_C"
--- /dev/null
+#
+# Pyrex - Parse tree nodes
+#
+
+import os, string, sys, time
+
+import Code
+from Errors import error, InternalError
+import Naming
+import PyrexTypes
+from PyrexTypes import py_object_type, error_type, CTypedefType
+from Symtab import ModuleScope, LocalScope, \
+ StructOrUnionScope, PyClassScope, CClassScope
+import TypeSlots
+import Version
+from Pyrex.Utils import open_new_file, replace_suffix
+import Options
+
+from DebugFlags import debug_disposal_code
+
class Node:
    #  Base class for all parse tree nodes.
    #
    #  pos         (string, int, int)   source file position
    #  is_name     boolean              is a NameNode
    #  is_literal  boolean              is a ConstNode

    is_name = 0
    is_literal = 0

    def __init__(self, pos, **kw):
        self.pos = pos
        for attr, value in kw.items():
            setattr(self, attr, value)

    #  Parse tree processing happens in three phases, each applied in
    #  order to all statements of a given scope-block:
    #
    #  (1) analyse_declarations
    #      Makes symbol table entries for everything declared at this
    #      level, whether explicitly (def, cdef, etc.) or implicitly
    #      (assignment to an otherwise undeclared name).
    #
    #  (2) analyse_expressions
    #      Determines expression result types, filling in each
    #      ExprNode's 'type' attribute; inserts coercion nodes where
    #      conversion to/from Python objects is needed; allocates
    #      temporaries for intermediate results; and fills in each
    #      ExprNode's 'result_code' with a C code fragment.
    #
    #  (3) generate_code
    #      Emits C code for all declarations, statements and
    #      expressions, recursively applying all three phases to the
    #      bodies of functions.

    def analyse_declarations(self, env):
        # Most nodes declare nothing; subclasses override as needed.
        pass

    def analyse_expressions(self, env):
        raise InternalError(
            "analyse_expressions not implemented for %s" %
                self.__class__.__name__)

    def generate_code(self, code):
        raise InternalError(
            "generate_code not implemented for %s" %
                self.__class__.__name__)
+
+
class BlockNode:
    #  Mixin class for nodes representing a declaration block.

    def generate_const_definitions(self, env, code):
        # Emit static declarations for the scope's constants,
        # skipping those handled by the interning machinery.
        if not env.const_entries:
            return
        code.putln("")
        for const_entry in env.const_entries:
            if const_entry.is_interned:
                continue
            code.put_var_declaration(const_entry, static = 1)

    def generate_interned_name_decls(self, env, code):
        # Flush the interned names accumulated in the global scope,
        # emitting a static PyObject pointer for each of them.
        genv = env.global_scope()
        pending = genv.interned_names
        if pending:
            code.putln("")
            cname_of = genv.intern_map
            for interned in pending:
                code.putln(
                    "static PyObject *%s;" % cname_of[interned])
            # Clear in place so the global scope starts accumulating
            # afresh for the next block.
            del pending[:]

    def generate_py_string_decls(self, env, code):
        # Emit a static PyObject pointer for each Python string
        # constant used in this scope.
        pending = env.pystring_entries
        if not pending:
            return
        code.putln("")
        for string_entry in pending:
            code.putln(
                "static PyObject *%s;" % string_entry.pystring_cname)
+
+
+class ModuleNode(Node, BlockNode):
+ # doc string or None
+ # body StatListNode
+
    def analyse_declarations(self, env):
        # Record the module docstring on the scope, then process the
        # declarations in the module body.
        env.doc = self.doc
        self.body.analyse_declarations(env)
+
    def process_implementation(self, env, result):
        # Run the full pipeline for an implementation file:
        # declaration analysis, extension class checks, expression
        # analysis, then C and public header generation.
        self.analyse_declarations(env)
        env.check_c_classes()
        self.body.analyse_expressions(env)
        env.return_type = PyrexTypes.c_void_type
        self.generate_c_code(env, result)
        self.generate_h_code(env, result)
+
    def generate_h_code(self, env, result):
        # If the module exports anything public, generate a .h header
        # declaring it for C callers and a matching .pxi file for
        # Pyrex callers; otherwise generate nothing.
        public_vars_and_funcs = []
        public_extension_types = []
        for entry in env.var_entries:
            if entry.visibility == 'public':
                public_vars_and_funcs.append(entry)
        for entry in env.cfunc_entries:
            if entry.visibility == 'public':
                public_vars_and_funcs.append(entry)
        for entry in env.c_class_entries:
            if entry.visibility == 'public':
                public_extension_types.append(entry)
        if public_vars_and_funcs or public_extension_types:
            result.h_file = replace_suffix(result.c_file, ".h")
            result.i_file = replace_suffix(result.c_file, ".pxi")
            h_code = Code.CCodeWriter(result.h_file)
            i_code = Code.PyrexCodeWriter(result.i_file)
            self.generate_extern_c_macro_definition(h_code)
            for entry in public_vars_and_funcs:
                h_code.putln("%s %s;" % (
                    Naming.extern_c_macro,
                    entry.type.declaration_code(
                        entry.cname, dll_linkage = "DL_IMPORT")))
                i_code.putln("cdef extern %s" %
                    entry.type.declaration_code(entry.cname, pyrex = 1))
            for entry in public_extension_types:
                self.generate_cclass_header_code(entry.type, h_code)
                self.generate_cclass_include_code(entry.type, i_code)
            h_code.putln("PyMODINIT_FUNC init%s(void);" % env.module_name)
+
    def generate_cclass_header_code(self, type, h_code):
        # Declare the extension type's type object and emit its object
        # struct into the public header.
        #h_code.putln("extern DL_IMPORT(PyTypeObject) %s;" % type.typeobj_cname)
        h_code.putln("%s DL_IMPORT(PyTypeObject) %s;" % (
            Naming.extern_c_macro,
            type.typeobj_cname))
        self.generate_obj_struct_definition(type, h_code)
+
+ def generate_cclass_include_code(self, type, i_code):
+ i_code.putln("cdef extern class %s.%s:" % (
+ type.module_name, type.name))
+ i_code.indent()
+ var_entries = type.scope.var_entries
+ if var_entries:
+ for entry in var_entries:
+ i_code.putln("cdef %s" %
+ entry.type.declaration_code(entry.cname, pyrex = 1))
+ else:
+ i_code.putln("pass")
+ i_code.dedent()
+
    def generate_c_code(self, env, result):
        # Drive generation of the complete C file: collect all
        # referenced module scopes, emit the preamble and per-module
        # declarations, then the implementation and the supporting
        # tables and functions.
        modules = []
        self.find_referenced_modules(env, modules, {})
        code = Code.CCodeWriter(result.c_file)
        code.init_labels()
        self.generate_module_preamble(env, modules, code)
        for module in modules:
            self.generate_declarations_for_module(module, code,
                definition = module is env)
        code.putln("")
        code.putln("/* Implementation of %s */" % env.qualified_name)
        self.generate_const_definitions(env, code)
        self.generate_interned_name_decls(env, code)
        self.generate_py_string_decls(env, code)
        self.body.generate_function_definitions(env, code)
        self.generate_interned_name_table(env, code)
        self.generate_py_string_table(env, code)
        self.generate_typeobj_definitions(env, code)
        self.generate_method_table(env, code)
        self.generate_filename_init_prototype(code)
        # modules[:-1]: the module being compiled is last in the list
        # and is not its own cimported dependency.
        self.generate_module_init_func(modules[:-1], env, code)
        self.generate_filename_table(code)
        self.generate_utility_functions(env, code)
        result.c_file_generated = 1
+
+ def find_referenced_modules(self, env, module_list, modules_seen):
+ if env not in modules_seen:
+ modules_seen[env] = 1
+ for imported_module in env.cimported_modules:
+ self.find_referenced_modules(imported_module, module_list, modules_seen)
+ module_list.append(env)
+
    def generate_module_preamble(self, env, cimported_modules, code):
        # Emit everything preceding the declarations: banner, Python
        # headers, compatibility defines, user includes, utility
        # predeclarations, and the module-level global variables.
        code.putln('/* Generated by Pyrex %s on %s */' % (
            Version.version, time.asctime()))
        code.putln('')
        for filename in env.python_include_files:
            code.putln('#include "%s"' % filename)
        # Older Pythons spell the 64-bit integer type LONG_LONG.
        code.putln("#ifndef PY_LONG_LONG")
        code.putln(" #define PY_LONG_LONG LONG_LONG")
        code.putln("#endif")
        self.generate_extern_c_macro_definition(code)
        code.putln("%s double pow(double, double);" % Naming.extern_c_macro)
        self.generate_includes(env, cimported_modules, code)
        #for filename in env.include_files:
        #    code.putln('#include "%s"' % filename)
        code.putln('')
        code.put(utility_function_predeclarations)
        if Options.intern_names:
            code.putln(get_name_interned_predeclaration)
        else:
            code.putln(get_name_predeclaration)
        code.putln('')
        code.putln('static PyObject *%s;' % env.module_cname)
        code.putln('static PyObject *%s;' % Naming.builtins_cname)
        code.putln('static int %s;' % Naming.lineno_cname)
        code.putln('static char *%s;' % Naming.filename_cname)
        code.putln('static char **%s;' % Naming.filetable_cname)
        if env.doc:
            code.putln('')
            code.putln('static char %s[] = "%s";' % (env.doc_cname, env.doc))
+
+ def generate_extern_c_macro_definition(self, code):
+ name = Naming.extern_c_macro
+ code.putln("#ifdef __cplusplus")
+ code.putln('#define %s extern "C"' % name)
+ code.putln("#else")
+ code.putln("#define %s extern" % name)
+ code.putln("#endif")
+
+ def generate_includes(self, env, cimported_modules, code):
+ includes = env.include_files[:]
+ for module in cimported_modules:
+ for filename in module.include_files:
+ if filename not in includes:
+ includes.append(filename)
+ for filename in includes:
+ code.putln('#include "%s"' % filename)
+
+ def generate_filename_table(self, code):
+ code.putln("")
+ code.putln("static char *%s[] = {" % Naming.filenames_cname)
+ if code.filename_list:
+ for filename in code.filename_list:
+ filename = os.path.basename(filename)
+ escaped_filename = filename.replace("\\", "\\\\").replace('"', r'\"')
+ code.putln('"%s",' %
+ escaped_filename)
+ else:
+ # Some C compilers don't like an empty array
+ code.putln("0")
+ code.putln("};")
+
    def generate_declarations_for_module(self, env, code, definition):
        # Emit all C-level declarations for one module scope;
        # 'definition' is true only for the module being compiled.
        code.putln("")
        code.putln("/* Declarations from %s */" % env.qualified_name)
        self.generate_type_predeclarations(env, code)
        self.generate_type_definitions(env, code)
        self.generate_global_declarations(env, code, definition)
        self.generate_cfunction_predeclarations(env, code)
+
    def generate_type_predeclarations(self, env, code):
        # Placeholder: no type predeclarations are currently emitted.
        pass
+
    def generate_type_definitions(self, env, code):
        # Generate definitions of structs/unions/enums, except those
        # that came from a 'cdef extern from' include.
        for entry in env.sue_entries:
            if not entry.in_cinclude:
                type = entry.type
                if type.is_struct_or_union:
                    self.generate_struct_union_definition(entry, code)
                else:
                    self.generate_enum_definition(entry, code)
        # Generate extension type object struct definitions, along
        # with their vtable struct and vtable pointer declarations.
        for entry in env.c_class_entries:
            if not entry.in_cinclude:
                self.generate_typeobject_predeclaration(entry, code)
                self.generate_obj_struct_definition(entry.type, code)
                self.generate_exttype_vtable_struct(entry, code)
                self.generate_exttype_vtabptr_declaration(entry, code)
+
+ def sue_header_footer(self, type, kind, name):
+ if type.typedef_flag:
+ header = "typedef %s {" % kind
+ footer = "} %s;" % name
+ else:
+ header = "%s %s {" % (kind, name)
+ footer = "};"
+ return header, footer
+
    def generate_struct_union_definition(self, entry, code):
        # Emit the C definition of a struct or union declared in
        # Pyrex. A scope of None means forward-declared only, in
        # which case nothing is emitted.
        type = entry.type
        scope = type.scope
        if scope:
            header, footer = \
                self.sue_header_footer(type, type.kind, type.cname)
            code.putln("")
            code.putln(header)
            var_entries = scope.var_entries
            if not var_entries:
                error(entry.pos,
                    "Empty struct or union definition not allowed outside a"
                    " 'cdef extern from' block")
            for attr in var_entries:
                code.putln(
                    "%s;" %
                        attr.type.declaration_code(attr.cname))
            code.putln(footer)
+
    def generate_enum_definition(self, entry, code):
        # Emit the C definition of an enum declared in Pyrex,
        # including explicit values where they differ from the
        # member's own name.
        type = entry.type
        name = entry.cname or entry.name or ""
        header, footer = \
            self.sue_header_footer(type, "enum", name)
        code.putln("")
        code.putln(header)
        enum_values = entry.enum_values
        if not enum_values:
            error(entry.pos,
                "Empty enum definition not allowed outside a"
                " 'cdef extern from' block")
        for value_entry in enum_values:
            if value_entry.value == value_entry.name:
                # Implicit value: let the C compiler number it.
                code.putln(
                    "%s," %
                        value_entry.cname)
            else:
                code.putln(
                    "%s = %s," % (
                        value_entry.cname,
                        value_entry.value))
        code.putln(footer)
+
    def generate_typeobject_predeclaration(self, entry, code):
        # Predeclare an extension type's PyTypeObject with import or
        # export linkage according to its visibility.
        code.putln("")
        name = entry.type.typeobj_cname
        if name:
            if entry.visibility == 'extern' and not entry.in_cinclude:
                code.putln("%s DL_IMPORT(PyTypeObject) %s;" % (
                    Naming.extern_c_macro,
                    name))
            elif entry.visibility == 'public':
                #code.putln("DL_EXPORT(PyTypeObject) %s;" % name)
                code.putln("%s DL_EXPORT(PyTypeObject) %s;" % (
                    Naming.extern_c_macro,
                    name))
            # ??? Do we really need the rest of this? ???
            #else:
            #    code.putln("staticforward PyTypeObject %s;" % name)
+
    def generate_exttype_vtable_struct(self, entry, code):
        # Generate struct declaration for an extension type's vtable.
        type = entry.type
        scope = type.scope
        if type.vtabstruct_cname:
            code.putln("")
            code.putln(
                "struct %s {" %
                    type.vtabstruct_cname)
            if type.base_type and type.base_type.vtabstruct_cname:
                # Embed the base type's vtable first so a pointer to
                # this struct can stand in for the base vtable type.
                code.putln("struct %s %s;" % (
                    type.base_type.vtabstruct_cname,
                    Naming.obj_base_cname))
            for method_entry in scope.cfunc_entries:
                if not method_entry.is_inherited:
                    code.putln(
                        "%s;" % method_entry.type.declaration_code("(*%s)" % method_entry.name))
            code.putln(
                "};")
+
    def generate_exttype_vtabptr_declaration(self, entry, code):
        # Generate declaration of pointer to an extension type's vtable.
        type = entry.type
        if type.vtabptr_cname:
            code.putln("static struct %s *%s;" % (
                type.vtabstruct_cname,
                type.vtabptr_cname))
+
+ def generate_obj_struct_definition(self, type, code):
+ # Generate object struct definition for an
+ # extension type.
+ if not type.scope:
+ return # Forward declared but never defined
+ header, footer = \
+ self.sue_header_footer(type, "struct", type.objstruct_cname)
+ code.putln("")
+ code.putln(header)
+ base_type = type.base_type
+ if base_type:
+ code.putln(
+ "%s%s %s;" % (
+ ("struct ", "")[base_type.typedef_flag],
+ base_type.objstruct_cname,
+ Naming.obj_base_cname))
+ else:
+ code.putln(
+ "PyObject_HEAD")
+ if type.vtabslot_cname and not (type.base_type and type.base_type.vtabslot_cname):
+ code.putln(
+ "struct %s *%s;" % (
+ type.vtabstruct_cname,
+ type.vtabslot_cname))
+ for attr in type.scope.var_entries:
+ code.putln(
+ "%s;" %
+ attr.type.declaration_code(attr.cname))
+ code.putln(footer)
+
+ def generate_global_declarations(self, env, code, definition):
+ code.putln("")
+ for entry in env.c_class_entries:
+ code.putln("static PyTypeObject *%s = 0;" %
+ entry.type.typeptr_cname)
+ code.put_var_declarations(env.var_entries, static = 1,
+ dll_linkage = "DL_EXPORT", definition = definition)
+ code.put_var_declarations(env.default_entries, static = 1)
+
+ def generate_cfunction_predeclarations(self, env, code):
+ for entry in env.cfunc_entries:
+ if not entry.in_cinclude:
+ if entry.visibility == 'public':
+ dll_linkage = "DL_EXPORT"
+ else:
+ dll_linkage = None
+ header = entry.type.declaration_code(entry.cname,
+ dll_linkage = dll_linkage)
+ if entry.visibility <> 'private':
+ storage_class = "%s " % Naming.extern_c_macro
+ else:
+ storage_class = "static "
+ code.putln("%s%s; /*proto*/" % (
+ storage_class,
+ header))
+
+ def generate_typeobj_definitions(self, env, code):
+ full_module_name = env.qualified_name
+ for entry in env.c_class_entries:
+ #print "generate_typeobj_definitions:", entry.name
+ #print "...visibility =", entry.visibility
+ if entry.visibility <> 'extern':
+ type = entry.type
+ scope = type.scope
+ if scope: # could be None if there was an error
+ self.generate_exttype_vtable(scope, code)
+ self.generate_new_function(scope, code)
+ self.generate_dealloc_function(scope, code)
+ self.generate_traverse_function(scope, code)
+ self.generate_clear_function(scope, code)
+ if scope.defines_any(["__getitem__"]):
+ self.generate_getitem_int_function(scope, code)
+ if scope.defines_any(["__setitem__", "__delitem__"]):
+ self.generate_ass_subscript_function(scope, code)
+ if scope.defines_any(["__setslice__", "__delslice__"]):
+ self.generate_ass_slice_function(scope, code)
+ if scope.defines_any(["__getattr__"]):
+ self.generate_getattro_function(scope, code)
+ if scope.defines_any(["__setattr__", "__delattr__"]):
+ self.generate_setattro_function(scope, code)
+ if scope.defines_any(["__get__"]):
+ self.generate_descr_get_function(scope, code)
+ if scope.defines_any(["__set__", "__delete__"]):
+ self.generate_descr_set_function(scope, code)
+ self.generate_property_accessors(scope, code)
+ self.generate_method_table(scope, code)
+ self.generate_member_table(scope, code)
+ self.generate_getset_table(scope, code)
+ self.generate_typeobj_definition(full_module_name, entry, code)
+
+ def generate_exttype_vtable(self, scope, code):
+ # Generate the definition of an extension type's vtable.
+ type = scope.parent_type
+ if type.vtable_cname:
+ code.putln("static struct %s %s;" % (
+ type.vtabstruct_cname,
+ type.vtable_cname))
+
+ def generate_self_cast(self, scope, code):
+ type = scope.parent_type
+ code.putln(
+ "%s = (%s)o;" % (
+ type.declaration_code("p"),
+ type.declaration_code("")))
+
    def generate_new_function(self, scope, code):
        # Emit the tp_new slot function: allocate the object (or delegate
        # to the base type's tp_new), install the vtable pointer, set all
        # Python attributes to None, and invoke the user's __new__ if any.
        base_type = scope.parent_type.base_type
        code.putln("")
        code.putln(
            "static PyObject *%s(PyTypeObject *t, PyObject *a, PyObject *k) {"
                % scope.mangle_internal("tp_new"))
        if base_type:
            # Let the base type perform the actual allocation.
            code.putln(
                "PyObject *o = %s->tp_new(t, a, k);" %
                    base_type.typeptr_cname)
        else:
            code.putln(
                "PyObject *o = (*t->tp_alloc)(t, 0);")
        self.generate_self_cast(scope, code)
        type = scope.parent_type
        if type.vtabslot_cname:
            # Point the instance's vtable slot at this type's vtable.
            code.putln("*(struct %s **)&p->%s = %s;" % (
                type.vtabstruct_cname,
                type.vtabslot_cname,
                type.vtabptr_cname))
        for entry in scope.var_entries:
            if entry.type.is_pyobject:
                code.put_init_var_to_py_none(entry, "p->%s")
        entry = scope.lookup_here("__new__")
        if entry:
            # If user __new__ fails, drop the object so NULL is returned.
            code.putln(
                "if (%s(o, a, k) < 0) {" %
                    entry.func_cname)
            code.put_decref_clear("o", py_object_type);
            code.putln(
                "}")
        code.putln(
            "return o;")
        code.putln(
            "}")
+
+ def generate_dealloc_function(self, scope, code):
+ base_type = scope.parent_type.base_type
+ code.putln("")
+ code.putln(
+ "static void %s(PyObject *o) {"
+ % scope.mangle_internal("tp_dealloc"))
+ self.generate_self_cast(scope, code)
+ self.generate_usr_dealloc_call(scope, code)
+ for entry in scope.var_entries:
+ if entry.type.is_pyobject:
+ code.put_xdecref("p->%s" % entry.cname, entry.type)
+ if base_type:
+ code.putln(
+ "%s->tp_dealloc(o);" %
+ base_type.typeptr_cname)
+ else:
+ code.putln(
+ "(*o->ob_type->tp_free)(o);")
+ code.putln(
+ "}")
+
+ def generate_usr_dealloc_call(self, scope, code):
+ entry = scope.lookup_here("__dealloc__")
+ if entry:
+ code.putln(
+ "{")
+ code.putln(
+ "PyObject *etype, *eval, *etb;")
+ code.putln(
+ "PyErr_Fetch(&etype, &eval, &etb);")
+ code.putln(
+ "++o->ob_refcnt;")
+ code.putln(
+ "%s(o);" %
+ entry.func_cname)
+ code.putln(
+ "if (PyErr_Occurred()) PyErr_WriteUnraisable(o);")
+ code.putln(
+ "--o->ob_refcnt;")
+ code.putln(
+ "PyErr_Restore(etype, eval, etb);")
+ code.putln(
+ "}")
+
    def generate_traverse_function(self, scope, code):
        # Emit the tp_traverse slot function for GC support: visit the
        # base type's references first, then each Python attribute.
        base_type = scope.parent_type.base_type
        code.putln("")
        code.putln(
            "static int %s(PyObject *o, visitproc v, void *a) {"
                % scope.mangle_internal("tp_traverse"))
        code.putln(
            "int e;")
        self.generate_self_cast(scope, code)
        if base_type:
            code.putln(
                "e = %s->tp_traverse(o, v, a); if (e) return e;" %
                    base_type.typeptr_cname)
        for entry in scope.var_entries:
            if entry.type.is_pyobject:
                var_code = "p->%s" % entry.cname
                # Attribute may be NULL (e.g. during construction).
                code.putln(
                    "if (%s) {"
                        % var_code)
                if entry.type.is_extension_type:
                    # visitproc expects a PyObject *.
                    var_code = "((PyObject*)%s)" % var_code
                code.putln(
                    "e = (*v)(%s, a); if (e) return e;"
                        % var_code)
                code.putln(
                    "}")
        code.putln(
            "return 0;")
        code.putln(
            "}")
+
    def generate_clear_function(self, scope, code):
        # Emit the tp_clear slot function for GC support: clear the base
        # type's references, then reset each Python attribute to None.
        base_type = scope.parent_type.base_type
        code.putln("")
        code.putln(
            "static int %s(PyObject *o) {"
                % scope.mangle_internal("tp_clear"))
        self.generate_self_cast(scope, code)
        if base_type:
            code.putln(
                "%s->tp_clear(o);" %
                    base_type.typeptr_cname)
        for entry in scope.var_entries:
            if entry.type.is_pyobject:
                name = "p->%s" % entry.cname
                # Decref first, then repoint at None so the slot is
                # never left dangling.
                code.put_xdecref(name, entry.type)
                code.put_init_var_to_py_none(entry, "p->%s")
        code.putln(
            "return 0;")
        code.putln(
            "}")
+
+ def generate_getitem_int_function(self, scope, code):
+ # This function is put into the sq_item slot when
+ # a __getitem__ method is present. It converts its
+ # argument to a Python integer and calls mp_subscript.
+ code.putln(
+ "static PyObject *%s(PyObject *o, int i) {" %
+ scope.mangle_internal("sq_item"))
+ code.putln(
+ "PyObject *r;")
+ code.putln(
+ "PyObject *x = PyInt_FromLong(i); if(!x) return 0;")
+ code.putln(
+ "r = o->ob_type->tp_as_mapping->mp_subscript(o, x);")
+ code.putln(
+ "Py_DECREF(x);")
+ code.putln(
+ "return r;")
+ code.putln(
+ "}")
+
    def generate_ass_subscript_function(self, scope, code):
        # Setting and deleting an item are both done through
        # the ass_subscript method, so we dispatch to user's __setitem__
        # or __delitem__, or raise an exception.
        base_type = scope.parent_type.base_type
        set_entry = scope.lookup_here("__setitem__")
        del_entry = scope.lookup_here("__delitem__")
        code.putln("")
        code.putln(
            "static int %s(PyObject *o, PyObject *i, PyObject *v) {" %
                scope.mangle_internal("mp_ass_subscript"))
        # v != NULL means assignment, v == NULL means deletion.
        code.putln(
            "if (v) {")
        if set_entry:
            code.putln(
                "return %s(o, i, v);" %
                    set_entry.func_cname)
        else:
            # No __setitem__ here: defer to the base type if it has the
            # slot, otherwise raise NotImplementedError.
            self.generate_guarded_basetype_call(
                base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
            code.putln(
                "PyErr_Format(PyExc_NotImplementedError,")
            code.putln(
                ' "Subscript assignment not supported by %s", o->ob_type->tp_name);')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "else {")
        if del_entry:
            code.putln(
                "return %s(o, i);" %
                    del_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
            code.putln(
                "PyErr_Format(PyExc_NotImplementedError,")
            code.putln(
                ' "Subscript deletion not supported by %s", o->ob_type->tp_name);')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "}")
+
+ def generate_guarded_basetype_call(
+ self, base_type, substructure, slot, args, code):
+ if base_type:
+ base_tpname = base_type.typeptr_cname
+ if substructure:
+ code.putln(
+ "if (%s->%s && %s->%s->%s)" % (
+ base_tpname, substructure, base_tpname, substructure, slot))
+ code.putln(
+ " return %s->%s->%s(%s);" % (
+ base_tpname, substructure, slot, args))
+ else:
+ code.putln(
+ "if (%s->%s)" % (
+ base_tpname, slot))
+ code.putln(
+ " return %s->%s(%s);" % (
+ base_tpname, slot, args))
+
    def generate_ass_slice_function(self, scope, code):
        # Setting and deleting a slice are both done through
        # the ass_slice method, so we dispatch to user's __setslice__
        # or __delslice__, or raise an exception.
        base_type = scope.parent_type.base_type
        set_entry = scope.lookup_here("__setslice__")
        del_entry = scope.lookup_here("__delslice__")
        code.putln("")
        code.putln(
            "static int %s(PyObject *o, int i, int j, PyObject *v) {" %
                scope.mangle_internal("sq_ass_slice"))
        # v != NULL means assignment, v == NULL means deletion.
        code.putln(
            "if (v) {")
        if set_entry:
            code.putln(
                "return %s(o, i, j, v);" %
                    set_entry.func_cname)
        else:
            # No __setslice__ here: defer to the base type if it has
            # the slot, otherwise raise NotImplementedError.
            self.generate_guarded_basetype_call(
                base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
            code.putln(
                "PyErr_Format(PyExc_NotImplementedError,")
            code.putln(
                ' "2-element slice assignment not supported by %s", o->ob_type->tp_name);')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "else {")
        if del_entry:
            code.putln(
                "return %s(o, i, j);" %
                    del_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
            code.putln(
                "PyErr_Format(PyExc_NotImplementedError,")
            code.putln(
                ' "2-element slice deletion not supported by %s", o->ob_type->tp_name);')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "}")
+
+ def generate_getattro_function(self, scope, code):
+ # First try to get the attribute using PyObject_GenericGetAttr.
+ # If that raises an AttributeError, call the user's __getattr__
+ # method.
+ entry = scope.lookup_here("__getattr__")
+ code.putln("")
+ code.putln(
+ "static PyObject *%s(PyObject *o, PyObject *n) {"
+ % scope.mangle_internal("tp_getattro"))
+ code.putln(
+ "PyObject *v = PyObject_GenericGetAttr(o, n);")
+ code.putln(
+ "if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) {")
+ code.putln(
+ "PyErr_Clear();")
+ code.putln(
+ "v = %s(o, n);" %
+ entry.func_cname)
+ code.putln(
+ "}")
+ code.putln(
+ "return v;")
+ code.putln(
+ "}")
+
    def generate_setattro_function(self, scope, code):
        # Setting and deleting an attribute are both done through
        # the setattro method, so we dispatch to user's __setattr__
        # or __delattr__ or fall back on PyObject_GenericSetAttr.
        base_type = scope.parent_type.base_type
        set_entry = scope.lookup_here("__setattr__")
        del_entry = scope.lookup_here("__delattr__")
        code.putln("")
        code.putln(
            "static int %s(PyObject *o, PyObject *n, PyObject *v) {" %
                scope.mangle_internal("tp_setattro"))
        # v != NULL means assignment, v == NULL means deletion.
        code.putln(
            "if (v) {")
        if set_entry:
            code.putln(
                "return %s(o, n, v);" %
                    set_entry.func_cname)
        else:
            # No __setattr__: try the base type's slot, then the
            # generic attribute machinery.
            self.generate_guarded_basetype_call(
                base_type, None, "tp_setattro", "o, n, v", code)
            code.putln(
                "return PyObject_GenericSetAttr(o, n, v);")
        code.putln(
            "}")
        code.putln(
            "else {")
        if del_entry:
            code.putln(
                "return %s(o, n);" %
                    del_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, None, "tp_setattro", "o, n, v", code)
            code.putln(
                "return PyObject_GenericSetAttr(o, n, 0);")
        code.putln(
            "}")
        code.putln(
            "}")
+
+ def generate_descr_get_function(self, scope, code):
+ # The __get__ function of a descriptor object can be
+ # called with NULL for the second or third arguments
+ # under some circumstances, so we replace them with
+ # None in that case.
+ user_get_entry = scope.lookup_here("__get__")
+ code.putln("")
+ code.putln(
+ "static PyObject *%s(PyObject *o, PyObject *i, PyObject *c) {" %
+ scope.mangle_internal("tp_descr_get"))
+ code.putln(
+ "PyObject *r = 0;")
+ code.putln(
+ "if (!i) i = Py_None;")
+ code.putln(
+ "if (!c) c = Py_None;")
+ #code.put_incref("i", py_object_type)
+ #code.put_incref("c", py_object_type)
+ code.putln(
+ "r = %s(o, i, c);" %
+ user_get_entry.func_cname)
+ #code.put_decref("i", py_object_type)
+ #code.put_decref("c", py_object_type)
+ code.putln(
+ "return r;")
+ code.putln(
+ "}")
+
    def generate_descr_set_function(self, scope, code):
        # Setting and deleting are both done through the __set__
        # method of a descriptor, so we dispatch to user's __set__
        # or __delete__ or raise an exception.
        base_type = scope.parent_type.base_type
        user_set_entry = scope.lookup_here("__set__")
        user_del_entry = scope.lookup_here("__delete__")
        code.putln("")
        code.putln(
            "static int %s(PyObject *o, PyObject *i, PyObject *v) {" %
                scope.mangle_internal("tp_descr_set"))
        # v != NULL means assignment, v == NULL means deletion.
        code.putln(
            "if (v) {")
        if user_set_entry:
            code.putln(
                "return %s(o, i, v);" %
                    user_set_entry.func_cname)
        else:
            # No __set__: defer to the base type's slot, else raise.
            self.generate_guarded_basetype_call(
                base_type, None, "tp_descr_set", "o, i, v", code)
            code.putln(
                'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "else {")
        if user_del_entry:
            code.putln(
                "return %s(o, i);" %
                    user_del_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, None, "tp_descr_set", "o, i, v", code)
            code.putln(
                'PyErr_SetString(PyExc_NotImplementedError, "__delete__");')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "}")
+
+ def generate_property_accessors(self, cclass_scope, code):
+ for entry in cclass_scope.property_entries:
+ property_scope = entry.scope
+ if property_scope.defines_any(["__get__"]):
+ self.generate_property_get_function(entry, code)
+ if property_scope.defines_any(["__set__", "__del__"]):
+ self.generate_property_set_function(entry, code)
+
+ def generate_property_get_function(self, property_entry, code):
+ property_scope = property_entry.scope
+ property_entry.getter_cname = property_scope.parent_scope.mangle(
+ Naming.prop_get_prefix, property_entry.name)
+ get_entry = property_scope.lookup_here("__get__")
+ code.putln("")
+ code.putln(
+ "static PyObject *%s(PyObject *o, void *x) {" %
+ property_entry.getter_cname)
+ code.putln(
+ "return %s(o);" %
+ get_entry.func_cname)
+ code.putln(
+ "}")
+
    def generate_property_set_function(self, property_entry, code):
        # Emit the C setter for a property.  Setting and deleting both go
        # through this function: v != NULL dispatches to __set__, v == NULL
        # to __del__; a missing method raises NotImplementedError.
        # Also records the mangled setter name for the getset table.
        property_scope = property_entry.scope
        property_entry.setter_cname = property_scope.parent_scope.mangle(
            Naming.prop_set_prefix, property_entry.name)
        set_entry = property_scope.lookup_here("__set__")
        del_entry = property_scope.lookup_here("__del__")
        code.putln("")
        code.putln(
            "static int %s(PyObject *o, PyObject *v, void *x) {" %
                property_entry.setter_cname)
        code.putln(
            "if (v) {")
        if set_entry:
            code.putln(
                "return %s(o, v);" %
                    set_entry.func_cname)
        else:
            code.putln(
                'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "else {")
        if del_entry:
            code.putln(
                "return %s(o);" %
                    del_entry.func_cname)
        else:
            code.putln(
                'PyErr_SetString(PyExc_NotImplementedError, "__del__");')
            code.putln(
                "return -1;")
        code.putln(
            "}")
        code.putln(
            "}")
+
    def generate_typeobj_definition(self, modname, entry, code):
        # Emit the static PyTypeObject definition for an extension type,
        # preceded by any method/member/getset substructures it needs.
        type = entry.type
        scope = type.scope
        for suite in TypeSlots.substructures:
            suite.generate_substructure(scope, code)
        code.putln("")
        if entry.visibility == 'public':
            header = "DL_EXPORT(PyTypeObject) %s = {"
        else:
            #header = "statichere PyTypeObject %s = {"
            header = "PyTypeObject %s = {"
        #code.putln(header % scope.parent_type.typeobj_cname)
        code.putln(header % type.typeobj_cname)
        # ob_type is filled in at runtime by PyType_Ready.
        code.putln(
            "PyObject_HEAD_INIT(0)")
        code.putln(
            "0, /*ob_size*/")
        code.putln(
            '"%s.%s", /*tp_name*/' % (
                modname, scope.class_name))
        if type.typedef_flag:
            objstruct = type.objstruct_cname
        else:
            objstruct = "struct %s" % type.objstruct_cname
        code.putln(
            "sizeof(%s), /*tp_basicsize*/" %
                objstruct)
        code.putln(
            "0, /*tp_itemsize*/")
        # The remaining slots come from the slot table.
        for slot in TypeSlots.slot_table:
            slot.generate(scope, code)
        code.putln(
            "};")
+
+ def generate_method_table(self, env, code):
+ code.putln("")
+ code.putln(
+ "static struct PyMethodDef %s[] = {" %
+ env.method_table_cname)
+ for entry in env.pyfunc_entries:
+ code.put_pymethoddef(entry, ",")
+ code.putln(
+ "{0, 0, 0, 0}")
+ code.putln(
+ "};")
+
+ def generate_member_table(self, env, code):
+ #print "ModuleNode.generate_member_table: scope =", env ###
+ if env.public_attr_entries:
+ code.putln("")
+ code.putln(
+ "static struct PyMemberDef %s[] = {" %
+ env.member_table_cname)
+ type = env.parent_type
+ if type.typedef_flag:
+ objstruct = type.objstruct_cname
+ else:
+ objstruct = "struct %s" % type.objstruct_cname
+ for entry in env.public_attr_entries:
+ type_code = entry.type.pymemberdef_typecode
+ if entry.visibility == 'readonly':
+ flags = "READONLY"
+ else:
+ flags = "0"
+ code.putln('{"%s", %s, %s, %s, 0},' % (
+ entry.name,
+ type_code,
+ "offsetof(%s, %s)" % (objstruct, entry.name),
+ flags))
+ code.putln(
+ "{0, 0, 0, 0, 0}")
+ code.putln(
+ "};")
+
+ def generate_getset_table(self, env, code):
+ if env.property_entries:
+ code.putln("")
+ code.putln(
+ "static struct PyGetSetDef %s[] = {" %
+ env.getset_table_cname)
+ for entry in env.property_entries:
+ code.putln(
+ '{"%s", %s, %s, %s, 0},' % (
+ entry.name,
+ entry.getter_cname or "0",
+ entry.setter_cname or "0",
+ entry.doc_cname or "0"))
+ code.putln(
+ "{0, 0, 0, 0, 0}")
+ code.putln(
+ "};")
+
+ def generate_interned_name_table(self, env, code):
+ items = env.intern_map.items()
+ if items:
+ items.sort()
+ code.putln("")
+ code.putln(
+ "static __Pyx_InternTabEntry %s[] = {" %
+ Naming.intern_tab_cname)
+ for (name, cname) in items:
+ code.putln(
+ '{&%s, "%s"},' % (
+ cname,
+ name))
+ code.putln(
+ "{0, 0}")
+ code.putln(
+ "};")
+
+ def generate_py_string_table(self, env, code):
+ entries = env.all_pystring_entries
+ if entries:
+ code.putln("")
+ code.putln(
+ "static __Pyx_StringTabEntry %s[] = {" %
+ Naming.stringtab_cname)
+ for entry in entries:
+ code.putln(
+ "{&%s, %s, sizeof(%s)}," % (
+ entry.pystring_cname,
+ entry.cname,
+ entry.cname))
+ code.putln(
+ "{0, 0, 0}")
+ code.putln(
+ "};")
+
+ def generate_filename_init_prototype(self, code):
+ code.putln("");
+ code.putln("static void %s(void); /*proto*/" % Naming.fileinit_cname)
+
    def generate_module_init_func(self, imported_modules, env, code):
        # Emit the module's init<name>() entry point: create the module
        # object, intern names/strings, initialise globals and extension
        # types, then run the module body's execution code.
        code.putln("")
        header = "PyMODINIT_FUNC init%s(void)" % env.module_name
        code.putln("%s; /*proto*/" % header)
        code.putln("%s {" % header)
        code.put_var_declarations(env.temp_entries)
        #code.putln("/*--- Libary function declarations ---*/")
        env.generate_library_function_declarations(code)
        self.generate_filename_init_call(code)
        #code.putln("/*--- Module creation code ---*/")
        self.generate_module_creation_code(env, code)
        #code.putln("/*--- Intern code ---*/")
        self.generate_intern_code(env, code)
        #code.putln("/*--- String init code ---*/")
        self.generate_string_init_code(env, code)
        #code.putln("/*--- Global init code ---*/")
        self.generate_global_init_code(env, code)
        #code.putln("/*--- Type import code ---*/")
        for module in imported_modules:
            self.generate_type_import_code_for_module(module, env, code)
        #code.putln("/*--- Type init code ---*/")
        self.generate_type_init_code(env, code)
        #code.putln("/*--- Execution code ---*/")
        self.body.generate_execution_code(code)
        code.putln("return;")
        # Error exit: clean up temporaries and record a traceback entry.
        code.put_label(code.error_label)
        code.put_var_xdecrefs(env.temp_entries)
        code.putln('__Pyx_AddTraceback("%s");' % (env.qualified_name))
        env.use_utility_code(traceback_utility_code)
        code.putln('}')
+
    def generate_filename_init_call(self, code):
        # Emit the call that initialises the filename table.
        code.putln("%s();" % Naming.fileinit_cname)
+
    def generate_module_creation_code(self, env, code):
        # Generate code to create the module object and
        # install the builtins.
        if env.doc:
            doc = env.doc_cname
        else:
            doc = "0"
        code.putln(
            '%s = Py_InitModule4("%s", %s, %s, 0, PYTHON_API_VERSION);' % (
                env.module_cname,
                env.module_name,
                env.method_table_cname,
                doc))
        code.putln(
            "if (!%s) %s;" % (
                env.module_cname,
                code.error_goto(self.pos)));
        # Fetch __builtin__ and attach it as __builtins__ so the module
        # body can resolve builtin names.
        code.putln(
            '%s = PyImport_AddModule("__builtin__");' %
                Naming.builtins_cname)
        code.putln(
            "if (!%s) %s;" % (
                Naming.builtins_cname,
                code.error_goto(self.pos)));
        code.putln(
            'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % (
                env.module_cname,
                Naming.builtins_cname,
                code.error_goto(self.pos)))
+
+ def generate_intern_code(self, env, code):
+ if env.intern_map:
+ env.use_utility_code(init_intern_tab_utility_code);
+ code.putln(
+ "if (__Pyx_InternStrings(%s) < 0) %s;" % (
+ Naming.intern_tab_cname,
+ code.error_goto(self.pos)))
+
+ def generate_string_init_code(self, env, code):
+ if env.all_pystring_entries:
+ env.use_utility_code(init_string_tab_utility_code)
+ code.putln(
+ "if (__Pyx_InitStrings(%s) < 0) %s;" % (
+ Naming.stringtab_cname,
+ code.error_goto(self.pos)))
+
+ def generate_global_init_code(self, env, code):
+ # Generate code to initialise global PyObject *
+ # variables to None.
+ for entry in env.var_entries:
+ if entry.visibility <> 'extern':
+ if entry.type.is_pyobject:
+ code.put_init_var_to_py_none(entry)
+
+ def generate_type_import_code_for_module(self, module, env, code):
+ # Generate type import code for all extension types in
+ # an imported module.
+ if module.c_class_entries:
+ for entry in module.c_class_entries:
+ self.generate_type_import_code(env, entry, code)
+
+ def generate_type_init_code(self, env, code):
+ # Generate type import code for extern extension types
+ # and type ready code for non-extern ones.
+ for entry in env.c_class_entries:
+ if entry.visibility == 'extern':
+ self.generate_type_import_code(env, entry, code)
+ else:
+ self.generate_exttype_vtable_init_code(entry, code)
+ self.generate_type_ready_code(env, entry, code)
+ self.generate_typeptr_assignment_code(entry, code)
+
    def use_type_import_utility_code(self, env):
        # Type importing relies on the generic module-import helper from
        # ExprNodes in addition to the type-import utility code; the
        # import is local to avoid a circular module dependency.
        import ExprNodes
        env.use_utility_code(type_import_utility_code)
        env.use_utility_code(ExprNodes.import_utility_code)
+
    def generate_type_import_code(self, env, entry, code):
        # Generate code to import the typeobject of an
        # extension type defined in another module, and
        # extract its C method table pointer if any.
        type = entry.type
        if type.typedef_flag:
            objstruct = type.objstruct_cname
        else:
            objstruct = "struct %s" % type.objstruct_cname
        # sizeof(objstruct) lets the runtime verify that the imported
        # type's object layout matches what we compiled against.
        code.putln('%s = __Pyx_ImportType("%s", "%s", sizeof(%s)); if (!%s) %s' % (
            type.typeptr_cname,
            type.module_name,
            type.name,
            objstruct,
            type.typeptr_cname,
            code.error_goto(entry.pos)))
        self.use_type_import_utility_code(env)
        if type.vtabptr_cname:
            # Retrieve the C method table stashed in the type's tp_dict.
            code.putln(
                "if (__Pyx_GetVtable(%s->tp_dict, &%s) < 0) %s" % (
                    type.typeptr_cname,
                    type.vtabptr_cname,
                    code.error_goto(entry.pos)))
            env.use_utility_code(get_vtable_utility_code)
+
+ def generate_type_ready_code(self, env, entry, code):
+ # Generate a call to PyType_Ready for an extension
+ # type defined in this module.
+ type = entry.type
+ typeobj_cname = type.typeobj_cname
+ scope = type.scope
+ if scope: # could be None if there was an error
+ if entry.visibility <> 'extern':
+ for slot in TypeSlots.slot_table:
+ slot.generate_dynamic_init_code(scope, code)
+ code.putln(
+ "if (PyType_Ready(&%s) < 0) %s" % (
+ typeobj_cname,
+ code.error_goto(entry.pos)))
+ if type.vtable_cname:
+ code.putln(
+ "if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s" % (
+ typeobj_cname,
+ type.vtabptr_cname,
+ code.error_goto(entry.pos)))
+ env.use_utility_code(set_vtable_utility_code)
+ code.putln(
+ 'if (PyObject_SetAttrString(%s, "%s", (PyObject *)&%s) < 0) %s' % (
+ Naming.module_cname,
+ scope.class_name,
+ typeobj_cname,
+ code.error_goto(entry.pos)))
+ weakref_entry = scope.lookup_here("__weakref__")
+ if weakref_entry:
+ if weakref_entry.type is py_object_type:
+ tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
+ code.putln("if (%s == 0) %s = offsetof(struct %s, %s);" % (
+ tp_weaklistoffset,
+ tp_weaklistoffset,
+ type.objstruct_cname,
+ weakref_entry.cname))
+ else:
+ error(weakref_entry.pos, "__weakref__ slot must be of type 'object'")
+
+ def generate_exttype_vtable_init_code(self, entry, code):
+ # Generate code to initialise the C method table of an
+ # extension type.
+ type = entry.type
+ if type.vtable_cname:
+ code.putln(
+ "%s = &%s;" % (
+ type.vtabptr_cname,
+ type.vtable_cname))
+ if type.base_type and type.base_type.vtabptr_cname:
+ code.putln(
+ "%s.%s = *%s;" % (
+ type.vtable_cname,
+ Naming.obj_base_cname,
+ type.base_type.vtabptr_cname))
+ for meth_entry in type.scope.cfunc_entries:
+ if meth_entry.func_cname:
+ code.putln(
+ "*(void **)&%s.%s = (void *)%s;" % (
+ type.vtable_cname,
+ meth_entry.cname,
+ meth_entry.func_cname))
+
+ def generate_typeptr_assignment_code(self, entry, code):
+ # Generate code to initialise the typeptr of an extension
+ # type defined in this module to point to its type object.
+ type = entry.type
+ if type.typeobj_cname:
+ code.putln(
+ "%s = &%s;" % (
+ type.typeptr_cname, type.typeobj_cname))
+
+ def generate_utility_functions(self, env, code):
+ code.putln("")
+ code.putln("/* Runtime support code */")
+ code.putln("")
+ code.putln("static void %s(void) {" % Naming.fileinit_cname)
+ code.putln("%s = %s;" %
+ (Naming.filetable_cname, Naming.filenames_cname))
+ code.putln("}")
+ for utility_code in env.utility_code_used:
+ code.put(utility_code)
+
+
class StatListNode(Node):
    #  stats   a list of StatNode

    def analyse_declarations(self, env):
        """Analyse declarations of each contained statement, in order."""
        for stat in self.stats:
            stat.analyse_declarations(env)

    def analyse_expressions(self, env):
        """Analyse expressions of each contained statement, in order."""
        for stat in self.stats:
            stat.analyse_expressions(env)

    def generate_function_definitions(self, env, code):
        """Generate nested definitions for each contained statement."""
        for stat in self.stats:
            stat.generate_function_definitions(env, code)

    def generate_execution_code(self, code):
        """Generate executable code, recording the source position of
        each statement for error reporting."""
        for stat in self.stats:
            code.mark_pos(stat.pos)
            stat.generate_execution_code(code)
+
+
class StatNode(Node):
    """Abstract base class for statement nodes.

    Code generation for statements is split into two subphases:

    (1) generate_function_definitions: emit C code for the definitions
        of any structs, unions, enums and functions defined in the
        current scope-block.

    (2) generate_execution_code: emit C code for executable statements.
    """

    def generate_function_definitions(self, env, code):
        # Nothing to define by default.
        pass

    def generate_execution_code(self, code):
        raise InternalError(
            "generate_execution_code not implemented for %s" %
            self.__class__.__name__)
+
+
class CDefExternNode(StatNode):
    #  include_file   string or None
    #  body           StatNode

    def analyse_declarations(self, env):
        """Analyse the body with the scope flagged as inside 'cdef extern'."""
        if self.include_file:
            env.add_include_file(self.include_file)
        saved_flag = env.in_cinclude
        env.in_cinclude = 1
        self.body.analyse_declarations(env)
        env.in_cinclude = saved_flag

    def analyse_expressions(self, env):
        # Extern blocks contain declarations only.
        pass

    def generate_execution_code(self, code):
        # Extern blocks generate no executable code.
        pass
+
+
class CDeclaratorNode(Node):
    # Part of a C declaration.
    #
    # Processing during analyse_declarations phase:
    #
    #   analyse
    #      Returns (name, type) pair where name is the
    #      CNameDeclaratorNode of the name being declared
    #      and type is the type it is being declared as.
    #
    pass
+
+
class CNameDeclaratorNode(CDeclaratorNode):
    #  name    string           The Pyrex name being declared
    #  cname   string or None   C name, if specified

    def analyse(self, base_type, env):
        # Innermost declarator: the accumulated type is final.
        return self, base_type
+
+
class CPtrDeclaratorNode(CDeclaratorNode):
    #  base   CDeclaratorNode

    def analyse(self, base_type, env):
        """Wrap base_type in a pointer type and recurse into the base."""
        if base_type.is_pyobject:
            error(self.pos,
                "Pointer base type cannot be a Python object")
        return self.base.analyse(PyrexTypes.c_ptr_type(base_type), env)
+
+
class CArrayDeclaratorNode(CDeclaratorNode):
    #  base        CDeclaratorNode
    #  dimension   ExprNode

    def analyse(self, base_type, env):
        """Wrap base_type in an array type and recurse into the base."""
        size = None
        if self.dimension:
            # The dimension must be a compile-time integer constant.
            self.dimension.analyse_const_expression(env)
            if not self.dimension.type.is_int:
                error(self.dimension.pos, "Array dimension not integer")
            #size = self.dimension.value
            size = self.dimension.result_code
        if not base_type.is_complete():
            error(self.pos,
                "Array element type '%s' is incomplete" % base_type)
        if base_type.is_pyobject:
            error(self.pos,
                "Array element cannot be a Python object")
        return self.base.analyse(
            PyrexTypes.c_array_type(base_type, size), env)
+
+
class CFuncDeclaratorNode(CDeclaratorNode):
    #  base             CDeclaratorNode
    #  args             [CArgDeclNode]
    #  has_varargs      boolean
    #  exception_value  ConstNode
    #  exception_check  boolean    True if PyErr_Occurred check needed

    def analyse(self, return_type, env):
        # Build a CFuncType from the argument declarations and the
        # return type, then recurse into the base declarator with it.
        func_type_args = []
        for arg_node in self.args:
            name_declarator, type = arg_node.analyse(env)
            name = name_declarator.name
            if name_declarator.cname:
                error(self.pos,
                    "Function argument cannot have C name specification")
            # Turn *[] argument into **
            if type.is_array:
                type = PyrexTypes.c_ptr_type(type.base_type)
            # Catch attempted C-style func(void) decl
            if type.is_void:
                error(arg_node.pos, "Function argument cannot be void")
            func_type_args.append(
                PyrexTypes.CFuncTypeArg(name, type, arg_node.pos))
            if arg_node.default:
                error(arg_node.pos, "C function argument cannot have default value")
        exc_val = None
        exc_check = 0
        if return_type.is_pyobject \
            and (self.exception_value or self.exception_check):
            # Python-object returns already signal errors via NULL.
            error(self.pos,
                "Exception clause not allowed for function returning Python object")
        else:
            if self.exception_value:
                self.exception_value.analyse_const_expression(env)
                exc_val = self.exception_value.result_code
                if not return_type.assignable_from(self.exception_value.type):
                    error(self.exception_value.pos,
                        "Exception value incompatible with function return type")
            exc_check = self.exception_check
        func_type = PyrexTypes.CFuncType(
            return_type, func_type_args, self.has_varargs,
            exception_value = exc_val, exception_check = exc_check)
        return self.base.analyse(func_type, env)
+
+
class CArgDeclNode(Node):
    # Item in a function declaration argument list.
    #
    # base_type      CBaseTypeNode
    # declarator     CDeclaratorNode
    # not_none       boolean            Tagged with 'not None'
    # default        ExprNode or None
    # default_entry  Symtab.Entry       Entry for the variable holding the default value
    # is_self_arg    boolean            Is the "self" arg of an extension type method

    is_self_arg = 0

    def analyse(self, env):
        # Resolve the base type, then let the declarator build the
        # full (name, type) pair.
        base_type = self.base_type.analyse(env)
        return self.declarator.analyse(base_type, env)
+
+
class CBaseTypeNode(Node):
    # Abstract base class for C base type nodes
    # (the part of a declaration before the declarator).
    #
    # Processing during analyse_declarations phase:
    #
    #   analyse
    #     Returns the type.

    pass
+
+
class CSimpleBaseTypeNode(CBaseTypeNode):
    # name             string
    # module_path      [string]   Qualifying name components
    # is_basic_c_type  boolean
    # signed           boolean
    # longness         integer
    # is_self_arg      boolean    Is self argument of C method

    def analyse(self, env):
        # Return a type descriptor for this base type; any problem is
        # reported and error_type returned in its place.
        if self.is_basic_c_type:
            basic = PyrexTypes.simple_c_type(self.signed, self.longness, self.name)
            if not basic:
                error(self.pos, "Unrecognised type modifier combination")
                return PyrexTypes.error_type
            return basic
        if self.name == "object" and not self.module_path:
            return py_object_type
        if self.name is None:
            # Untyped argument: the self arg of a C method gets the
            # enclosing extension type, anything else a Python object.
            if self.is_self_arg and env.is_c_class_scope:
                return env.parent_type or PyrexTypes.error_type
            return py_object_type
        # Possibly qualified name: walk the module path, then look the
        # name up as a type in the scope reached.
        scope = env
        for component in self.module_path:
            entry = scope.find(component, self.pos)
            if entry and entry.as_module:
                scope = entry.as_module
            else:
                if entry:
                    error(self.pos, "'%s' is not a cimported module" % component)
                return PyrexTypes.error_type
        entry = scope.find(self.name, self.pos)
        if entry and entry.is_type:
            return entry.type or PyrexTypes.error_type
        error(self.pos, "'%s' is not a type identifier" % self.name)
        return PyrexTypes.error_type
+
+
class CComplexBaseTypeNode(CBaseTypeNode):
    # base_type   CBaseTypeNode
    # declarator  CDeclaratorNode

    def analyse(self, env):
        # Only the resulting type matters here; the name declarator
        # produced alongside it is discarded.
        analysed = self.declarator.analyse(self.base_type.analyse(env), env)
        return analysed[1]
+
+
class CVarDefNode(StatNode):
    # C variable definition or forward/extern function declaration.
    #
    # visibility   'private' or 'public' or 'extern'
    # base_type    CBaseTypeNode
    # declarators  [CDeclaratorNode]

    def analyse_declarations(self, env, dest_scope = None):
        # Declare each declarator in dest_scope (defaulting to the
        # analysing scope itself).
        dest_scope = dest_scope or env
        base_type = self.base_type.analyse(env)
        visibility = self.visibility
        for declarator in self.declarators:
            name_declarator, type = declarator.analyse(base_type, env)
            if not type.is_complete():
                # Incomplete types are tolerated only for extern arrays.
                if not (visibility == 'extern' and type.is_array):
                    error(declarator.pos,
                        "Variable type '%s' is incomplete" % type)
            if visibility == 'extern' and type.is_pyobject:
                error(declarator.pos,
                    "Python object cannot be declared extern")
            name = name_declarator.name
            cname = name_declarator.cname
            if type.is_cfunction:
                dest_scope.declare_cfunction(name, type, declarator.pos,
                    cname = cname, visibility = visibility)
            else:
                dest_scope.declare_var(name, type, declarator.pos,
                    cname = cname, visibility = visibility, is_cdef = 1)

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        pass
+
+
class CStructOrUnionDefNode(StatNode):
    # name          string
    # cname         string or None
    # kind          "struct" or "union"
    # typedef_flag  boolean
    # attributes    [CVarDefNode] or None
    # entry         Entry

    def analyse_declarations(self, env):
        # A None attribute list means a forward declaration, which
        # gets no member scope.
        scope = StructOrUnionScope() if self.attributes is not None else None
        self.entry = env.declare_struct_or_union(
            self.name, self.kind, scope, self.typedef_flag, self.pos,
            self.cname)
        if scope is not None:
            for attr in self.attributes:
                attr.analyse_declarations(env, scope)

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        pass
+
+
class CEnumDefNode(StatNode):
    # name          string or None
    # cname         string or None
    # items         [CEnumDefItemNode]
    # typedef_flag  boolean
    # entry         Entry

    def analyse_declarations(self, env):
        # Declare the enum itself, then each of its values inside it.
        self.entry = env.declare_enum(self.name, self.pos,
            cname = self.cname, typedef_flag = self.typedef_flag)
        for enum_item in self.items:
            enum_item.analyse_declarations(env, self.entry)

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        pass
+
+
class CEnumDefItemNode(StatNode):
    # name   string
    # cname  string or None
    # value  ExprNode or None

    def analyse_declarations(self, env, enum_entry):
        # An explicit value must be a constant expression; otherwise
        # the C name of the enum member serves as its value.
        if self.value:
            self.value.analyse_const_expression(env)
            const_value = self.value.result_code
        else:
            const_value = self.name
        member = env.declare_const(self.name, enum_entry.type,
            const_value, self.pos, cname = self.cname)
        enum_entry.enum_values.append(member)
+
+
class CTypeDefNode(StatNode):
    # base_type   CBaseTypeNode
    # declarator  CDeclaratorNode

    def analyse_declarations(self, env):
        # Register a typedef name for the declared type.
        name_declarator, type = self.declarator.analyse(
            self.base_type.analyse(env), env)
        name = name_declarator.name
        cname = name_declarator.cname
        if env.in_cinclude:
            # Inside 'extern from', wrap the type so generated code
            # refers to it by the typedef name.
            type = CTypedefType(cname or name, type)
        env.declare_type(name, type, self.pos, cname = cname)

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        pass
+
+
class FuncDefNode(StatNode, BlockNode):
    # Base class for function definition nodes.
    #
    # return_type  PyrexType
    # #filename    string    C name of filename string const
    # entry        Symtab.Entry

    def analyse_expressions(self, env):
        # Expression analysis is deferred to generate_function_definitions,
        # which builds the local scope first.
        pass

    def generate_function_definitions(self, env, code):
        # Generate C code for header and body of function.
        # Analyses the body in a fresh LocalScope, then emits the
        # sections of the C function in a fixed order; that order is
        # part of the contract between the sections (e.g. labels must
        # be initialised before any error_goto is generated).
        genv = env.global_scope()
        lenv = LocalScope(name = self.entry.name, outer_scope = genv)
        #lenv.function_name = self.function_name()
        lenv.return_type = self.return_type
        #self.filename = lenv.get_filename_const(self.pos)
        code.init_labels()
        self.declare_arguments(lenv)
        self.body.analyse_declarations(lenv)
        self.body.analyse_expressions(lenv)
        # Code for nested function definitions would go here
        # if we supported them, which we probably won't.
        # ----- Top-level constants used by this function
        self.generate_interned_name_decls(lenv, code)
        self.generate_py_string_decls(lenv, code)
        #code.putln("")
        #code.put_var_declarations(lenv.const_entries, static = 1)
        self.generate_const_definitions(lenv, code)
        # ----- Function header
        code.putln("")
        self.generate_function_header(code,
            with_pymethdef = env.is_py_class_scope)
        # ----- Local variable declarations
        self.generate_argument_declarations(lenv, code)
        code.put_var_declarations(lenv.var_entries)
        init = ""
        if not self.return_type.is_void:
            # Declare the return value variable.
            code.putln(
                "%s%s;" %
                    (self.return_type.declaration_code(
                        Naming.retval_cname),
                    init))
        code.put_var_declarations(lenv.temp_entries)
        self.generate_keyword_list(code)
        # ----- Extern library function declarations
        lenv.generate_library_function_declarations(code)
        # ----- Fetch arguments
        self.generate_argument_parsing_code(code)
        self.generate_argument_increfs(lenv, code)
        #self.generate_stararg_getting_code(code)
        self.generate_argument_conversion_code(code)
        # ----- Initialise local variables
        for entry in lenv.var_entries:
            if entry.type.is_pyobject and entry.init_to_none:
                code.put_init_var_to_py_none(entry)
        # ----- Check types of arguments
        self.generate_argument_type_tests(code)
        # ----- Function body
        self.body.generate_execution_code(code)
        # ----- Default return value
        code.putln("")
        if self.return_type.is_pyobject:
            #if self.return_type.is_extension_type:
            # lhs = "(PyObject *)%s" % Naming.retval_cname
            #else:
            lhs = Naming.retval_cname
            code.put_init_to_py_none(lhs, self.return_type)
        else:
            val = self.return_type.default_value
            if val:
                code.putln("%s = %s;" % (Naming.retval_cname, val))
        code.putln("goto %s;" % code.return_label)
        # ----- Error cleanup
        code.put_label(code.error_label)
        code.put_var_xdecrefs(lenv.temp_entries)
        err_val = self.error_value()
        exc_check = self.caller_will_check_exceptions()
        if err_val is not None or exc_check:
            # Caller can detect the error: record a traceback and
            # return the designated error value.
            code.putln(
                '__Pyx_AddTraceback("%s");' %
                    self.entry.qualified_name)
            if err_val is not None:
                code.putln(
                    "%s = %s;" % (
                        Naming.retval_cname,
                        err_val))
        else:
            # No way to report the error through the return value:
            # write it out as an unraisable exception.
            code.putln(
                '__Pyx_WriteUnraisable("%s");' %
                    self.entry.qualified_name)
            env.use_utility_code(unraisable_exception_utility_code)
        # ----- Return cleanup
        code.put_label(code.return_label)
        code.put_var_decrefs(lenv.var_entries)
        code.put_var_decrefs(lenv.arg_entries)
        self.put_stararg_decrefs(code)
        if not self.return_type.is_void:
            retval_code = Naming.retval_cname
            #if self.return_type.is_extension_type:
            # retval_code = "((%s)%s) " % (
            # self.return_type.declaration_code(""),
            # retval_code)
            code.putln("return %s;" % retval_code)
        code.putln("}")

    def put_stararg_decrefs(self, code):
        # Overridden by DefNode for * / ** arguments.
        pass

    def declare_argument(self, env, arg):
        # Validate and declare one argument in the local scope.
        if arg.type.is_void:
            error(arg.pos, "Invalid use of 'void'")
        elif not arg.type.is_complete() and not arg.type.is_array:
            error(arg.pos,
                "Argument type '%s' is incomplete" % arg.type)
        return env.declare_arg(arg.name, arg.type, arg.pos)

    def generate_argument_increfs(self, env, code):
        # Turn borrowed argument refs into owned refs.
        # This is necessary, because if the argument is
        # assigned to, it will be decrefed.
        for entry in env.arg_entries:
            code.put_var_incref(entry)

    def generate_execution_code(self, code):
        pass
+
+
class CFuncDefNode(FuncDefNode):
    # C function definition.
    #
    # visibility  'private' or 'public' or 'extern'
    # base_type   CBaseTypeNode
    # declarator  CDeclaratorNode
    # body        StatListNode
    #
    # type        CFuncType

    def unqualified_name(self):
        return self.entry.name

    def analyse_declarations(self, env):
        # Analyse the header and declare the function in the scope.
        base_type = self.base_type.analyse(env)
        name_declarator, type = self.declarator.analyse(base_type, env)
        # Remember the actual type according to the function header
        # written here, because the type in the symbol table entry
        # may be different if we're overriding a C method inherited
        # from the base type of an extension type.
        self.type = type
        if not type.is_cfunction:
            error(self.pos,
                "Suite attached to non-function declaration")
        name = name_declarator.name
        cname = name_declarator.cname
        self.entry = env.declare_cfunction(
            name, type, self.pos,
            cname = cname, visibility = self.visibility,
            defining = self.body is not None)
        self.return_type = type.return_type

    def declare_arguments(self, env):
        # All arguments of a defined C function must be named.
        for arg in self.type.args:
            if not arg.name:
                error(arg.pos, "Missing argument name")
            self.declare_argument(env, arg)

    def generate_function_header(self, code, with_pymethdef):
        # Emit the C function header: 'public' functions get DLL
        # export linkage, non-private ones extern-"C" storage,
        # everything else is static.
        arg_decls = []
        type = self.type
        for arg in type.args:
            arg_decls.append(arg.declaration_code())
        if type.has_varargs:
            arg_decls.append("...")
        if not arg_decls:
            arg_decls = ["void"]
        # str.join replaces the deprecated string.join form.
        entity = "%s(%s)" % (self.entry.func_cname,
            ",".join(arg_decls))
        if self.visibility == 'public':
            dll_linkage = "DL_EXPORT"
        else:
            dll_linkage = None
        header = self.return_type.declaration_code(entity,
            dll_linkage = dll_linkage)
        # "!=" replaces the deprecated "<>" operator (removed in Python 3).
        if self.visibility != 'private':
            storage_class = "%s " % Naming.extern_c_macro
        else:
            storage_class = "static "
        code.putln("%s%s {" % (
            storage_class,
            header))

    def generate_argument_declarations(self, env, code):
        # Arguments already declared in function header
        pass

    def generate_keyword_list(self, code):
        # C functions take no Python keyword arguments.
        pass

    def generate_argument_parsing_code(self, code):
        # Arguments arrive as C values; nothing to parse.
        pass

    def generate_argument_conversion_code(self, code):
        pass

    def generate_argument_type_tests(self, code):
        pass

    def error_value(self):
        # C value returned to signal an error, or None if the
        # function has no declared exception value.
        if self.return_type.is_pyobject:
            return "0"
        else:
            return self.entry.type.exception_value

    def caller_will_check_exceptions(self):
        return self.entry.type.exception_check
+
+
class PyArgDeclNode(Node):
    # Argument which must be a Python object (used
    # for * and ** arguments).
    #
    # name   string
    # entry  Symtab.Entry

    pass
+
+
class DefNode(FuncDefNode):
    # A Python function definition.
    #
    # name          string                 the Python name of the function
    # args          [CArgDeclNode]         formal arguments
    # star_arg      PyArgDeclNode or None  * argument
    # starstar_arg  PyArgDeclNode or None  ** argument
    # doc           string or None
    # body          StatListNode
    #
    # The following subnode is constructed internally
    # when the def statement is inside a Python class definition.
    #
    # assmt   AssignmentNode   Function construction/assignment

    assmt = None

    def analyse_declarations(self, env):
        # Analyse each formal argument's declared type, then declare
        # the function itself and match it against its slot signature.
        for arg in self.args:
            base_type = arg.base_type.analyse(env)
            name_declarator, type = \
                arg.declarator.analyse(base_type, env)
            arg.name = name_declarator.name
            if name_declarator.cname:
                error(self.pos,
                    "Python function argument cannot have C name specification")
            arg.type = type.as_argument_type()
            arg.hdr_type = None
            arg.needs_conversion = 0
            arg.needs_type_test = 0
            arg.is_generic = 1
            if arg.not_none and not arg.type.is_extension_type:
                error(self.pos,
                    "Only extension type arguments can have 'not None'")
        self.declare_pyfunction(env)
        self.analyse_signature(env)
        self.return_type = self.entry.signature.return_type()
        if self.star_arg or self.starstar_arg:
            env.use_utility_code(get_starargs_utility_code)

    def analyse_signature(self, env):
        # Match the declared arguments against the fixed-argument
        # slot signature, deciding for each one whether it arrives
        # directly in the C header, needs conversion from the header
        # type, or needs a runtime type test.
        any_type_tests_needed = 0
        sig = self.entry.signature
        nfixed = sig.num_fixed_args()
        for i in range(nfixed):
            if i < len(self.args):
                arg = self.args[i]
                arg.is_generic = 0
                if sig.is_self_arg(i):
                    arg.is_self_arg = 1
                    arg.hdr_type = arg.type = env.parent_type
                    arg.needs_conversion = 0
                else:
                    arg.hdr_type = sig.fixed_arg_type(i)
                    if not arg.type.same_as(arg.hdr_type):
                        if arg.hdr_type.is_pyobject and arg.type.is_pyobject:
                            arg.needs_type_test = 1
                            any_type_tests_needed = 1
                        else:
                            arg.needs_conversion = 1
                if arg.needs_conversion:
                    arg.hdr_cname = Naming.arg_prefix + arg.name
                else:
                    arg.hdr_cname = Naming.var_prefix + arg.name
            else:
                self.bad_signature()
                return
        if nfixed < len(self.args):
            if not sig.has_generic_args:
                self.bad_signature()
            for arg in self.args:
                if arg.is_generic and arg.type.is_extension_type:
                    arg.needs_type_test = 1
                    any_type_tests_needed = 1
        if any_type_tests_needed:
            env.use_utility_code(arg_type_test_utility_code)

    def bad_signature(self):
        # Report a mismatch between the number of declared arguments
        # and the number expected by the slot signature.
        sig = self.entry.signature
        expected_str = "%d" % sig.num_fixed_args()
        if sig.has_generic_args:
            expected_str = expected_str + " or more"
        name = self.name
        if name.startswith("__") and name.endswith("__"):
            desc = "Special method"
        else:
            desc = "Method"
        error(self.pos,
            "%s %s has wrong number of arguments "
            "(%d declared, %s expected)" % (
                desc, self.name, len(self.args), expected_str))

    def declare_pyfunction(self, env):
        # Declare the function entry and assign the C names used for
        # the function, its doc string and its PyMethodDef struct.
        self.entry = env.declare_pyfunction(self.name, self.pos)
        self.entry.doc = self.doc
        self.entry.func_cname = \
            Naming.func_prefix + env.scope_prefix + self.name
        self.entry.doc_cname = \
            Naming.funcdoc_prefix + env.scope_prefix + self.name
        self.entry.pymethdef_cname = \
            Naming.pymethdef_prefix + env.scope_prefix + self.name

    def declare_arguments(self, env):
        # Declare each argument in the local scope. Arguments that
        # need conversion get a separate local variable; the rest are
        # declared as ordinary arguments.
        for arg in self.args:
            if not arg.name:
                error(arg.pos, "Missing argument name")
            if arg.needs_conversion:
                arg.entry = env.declare_var(arg.name, arg.type, arg.pos)
                if arg.type.is_pyobject:
                    arg.entry.init = "0"
                    arg.entry.init_to_none = 0
            else:
                arg.entry = self.declare_argument(env, arg)
            arg.entry.is_self_arg = arg.is_self_arg
            if arg.hdr_type:
                if arg.is_self_arg or \
                        (arg.type.is_extension_type and not arg.hdr_type.is_extension_type):
                    arg.entry.is_declared_generic = 1
        self.declare_python_arg(env, self.star_arg)
        self.declare_python_arg(env, self.starstar_arg)

    def declare_python_arg(self, env, arg):
        # Declare the variable for a * or ** argument; it is filled
        # in at runtime and cleaned up with an xdecref.
        if arg:
            arg.entry = env.declare_var(arg.name,
                PyrexTypes.py_object_type, arg.pos)
            arg.entry.init = "0"
            arg.entry.init_to_none = 0
            arg.entry.xdecref_cleanup = 1

    def analyse_expressions(self, env):
        self.analyse_default_values(env)
        if env.is_py_class_scope:
            # Inside a Python class body, the def statement becomes
            # an assignment of an unbound method object.
            self.synthesize_assignment_node(env)

    def analyse_default_values(self, env):
        # Type-check default values and allocate module-level storage
        # for them; only generic (Python-level) arguments may have
        # defaults.
        for arg in self.args:
            if arg.default:
                if arg.is_generic:
                    arg.default.analyse_types(env)
                    arg.default = arg.default.coerce_to(arg.type, env)
                    arg.default.allocate_temps(env)
                    arg.default_entry = env.add_default_value(arg.type)
                else:
                    error(arg.pos,
                        "This argument cannot have a default value")
                    arg.default = None

    def synthesize_assignment_node(self, env):
        # Build "<name> = unbound method wrapping this function" for
        # defs appearing directly in a Python class body.
        import ExprNodes
        self.assmt = SingleAssignmentNode(self.pos,
            lhs = ExprNodes.NameNode(self.pos, name = self.name),
            rhs = ExprNodes.UnboundMethodNode(self.pos,
                class_cname = env.class_obj_cname,
                function = ExprNodes.PyCFunctionNode(self.pos,
                    pymethdef_cname = self.entry.pymethdef_cname)))
        self.assmt.analyse_declarations(env)
        self.assmt.analyse_expressions(env)

    def generate_function_header(self, code, with_pymethdef):
        # Emit the prototype, doc string constant, PyMethodDef (when
        # requested) and opening line of the implementing C function.
        arg_code_list = []
        sig = self.entry.signature
        if sig.has_dummy_arg:
            arg_code_list.append(
                "PyObject *%s" % Naming.self_cname)
        for arg in self.args:
            if not arg.is_generic:
                if arg.is_self_arg:
                    arg_code_list.append("PyObject *%s" % arg.hdr_cname)
                else:
                    arg_code_list.append(
                        arg.hdr_type.declaration_code(arg.hdr_cname))
        if sig.has_generic_args:
            arg_code_list.append(
                "PyObject *%s, PyObject *%s"
                    % (Naming.args_cname, Naming.kwds_cname))
        arg_code = ", ".join(arg_code_list)
        dc = self.return_type.declaration_code(self.entry.func_cname)
        header = "static %s(%s)" % (dc, arg_code)
        code.putln("%s; /*proto*/" % header)
        if self.entry.doc:
            code.putln(
                'static char %s[] = "%s";' % (
                    self.entry.doc_cname,
                    self.entry.doc))
        if with_pymethdef:
            code.put(
                "static PyMethodDef %s = " %
                    self.entry.pymethdef_cname)
            code.put_pymethoddef(self.entry, ";")
        code.putln("%s {" % header)

    def generate_argument_declarations(self, env, code):
        for arg in self.args:
            if arg.is_generic: # or arg.needs_conversion:
                code.put_var_declaration(arg.entry)

    def generate_keyword_list(self, code):
        # Emit the NULL-terminated keyword name list used by
        # PyArg_ParseTupleAndKeywords.
        if self.entry.signature.has_generic_args:
            code.put(
                "static char *%s[] = {" %
                    Naming.kwdlist_cname)
            for arg in self.args:
                if arg.is_generic:
                    code.put(
                        '"%s",' %
                            arg.name)
            code.putln(
                "0};")

    def generate_argument_parsing_code(self, code):
        # Generate PyArg_ParseTuple call for generic
        # arguments, if any.
        if self.entry.signature.has_generic_args:
            arg_addrs = []
            arg_formats = []
            default_seen = 0
            for arg in self.args:
                arg_entry = arg.entry
                if arg.is_generic:
                    if arg.default:
                        # Pre-assign the default; "|" marks the start
                        # of optional args in the format string.
                        code.putln(
                            "%s = %s;" % (
                                arg_entry.cname,
                                arg.default_entry.cname))
                        if not default_seen:
                            arg_formats.append("|")
                        default_seen = 1
                    elif default_seen:
                        error(arg.pos, "Non-default argument following default argument")
                    arg_addrs.append("&" + arg_entry.cname)
                    format = arg_entry.type.parsetuple_format
                    if format:
                        arg_formats.append(format)
                    else:
                        error(arg.pos,
                            "Cannot convert Python object argument to type '%s'"
                                % arg.type)
            argformat = '"%s"' % string.join(arg_formats, "")
            has_starargs = self.star_arg is not None or self.starstar_arg is not None
            if has_starargs:
                self.generate_stararg_getting_code(code)
            pt_arglist = [Naming.args_cname, Naming.kwds_cname, argformat,
                    Naming.kwdlist_cname] + arg_addrs
            pt_argstring = string.join(pt_arglist, ", ")
            code.put(
                'if (!PyArg_ParseTupleAndKeywords(%s)) ' %
                    pt_argstring)
            error_return_code = "return %s;" % self.error_value()
            if has_starargs:
                # On failure, release the repacked args/kwds objects
                # and any already-fetched * / ** objects.
                code.putln("{")
                code.put_xdecref(Naming.args_cname, py_object_type)
                code.put_xdecref(Naming.kwds_cname, py_object_type)
                self.generate_arg_xdecref(self.star_arg, code)
                self.generate_arg_xdecref(self.starstar_arg, code)
                code.putln(error_return_code)
                code.putln("}")
            else:
                code.putln(error_return_code)

    def put_stararg_decrefs(self, code):
        if self.star_arg or self.starstar_arg:
            code.put_xdecref(Naming.args_cname, py_object_type)
            code.put_xdecref(Naming.kwds_cname, py_object_type)

    def generate_arg_xdecref(self, arg, code):
        if arg:
            code.put_var_xdecref(arg.entry)

    def arg_address(self, arg):
        # C address of the variable for a * or ** arg, or 0 (emitted
        # as a NULL pointer) when the argument is absent.
        if arg:
            return "&%s" % arg.entry.cname
        else:
            return 0

    def generate_stararg_getting_code(self, code):
        # Call __Pyx_GetStarArgs to split off excess positional and
        # keyword arguments into the * and ** variables.
        if self.star_arg or self.starstar_arg:
            if not self.entry.signature.has_generic_args:
                error(self.pos, "This method cannot have * or ** arguments")
            star_arg_addr = self.arg_address(self.star_arg)
            starstar_arg_addr = self.arg_address(self.starstar_arg)
            code.putln(
                "if (__Pyx_GetStarArgs(&%s, &%s, %s, %s, %s, %s) < 0) return %s;" % (
                    Naming.args_cname,
                    Naming.kwds_cname,
                    Naming.kwdlist_cname,
                    len(self.args) - self.entry.signature.num_fixed_args(),
                    star_arg_addr,
                    starstar_arg_addr,
                    self.error_value()))

    def generate_argument_conversion_code(self, code):
        # Generate code to convert arguments from
        # signature type to declared type, if needed.
        for arg in self.args:
            if arg.needs_conversion:
                self.generate_arg_conversion(arg, code)

    def generate_arg_conversion(self, arg, code):
        # Generate conversion code for one argument.
        old_type = arg.hdr_type
        new_type = arg.type
        if old_type.is_pyobject:
            self.generate_arg_conversion_from_pyobject(arg, code)
        elif new_type.is_pyobject:
            self.generate_arg_conversion_to_pyobject(arg, code)
        else:
            if new_type.assignable_from(old_type):
                code.putln(
                    "%s = %s;" % (arg.entry.cname, arg.hdr_cname))
            else:
                error(arg.pos,
                    "Cannot convert argument from '%s' to '%s'" %
                        (old_type, new_type))

    def generate_arg_conversion_from_pyobject(self, arg, code):
        # Convert a Python object argument to its declared C type;
        # the conversion function reports failure via the exception
        # state, hence the PyErr_Occurred check.
        new_type = arg.type
        func = new_type.from_py_function
        if func:
            code.putln("%s = %s(%s); if (PyErr_Occurred()) %s" % (
                arg.entry.cname,
                func,
                arg.hdr_cname,
                code.error_goto(arg.pos)))
        else:
            error(arg.pos,
                "Cannot convert Python object argument to type '%s'"
                    % new_type)

    def generate_arg_conversion_to_pyobject(self, arg, code):
        # Convert a C argument to a Python object; a NULL result
        # signals failure.
        old_type = arg.hdr_type
        func = old_type.to_py_function
        if func:
            code.putln("%s = %s(%s); if (!%s) %s" % (
                arg.entry.cname,
                func,
                arg.hdr_cname,
                arg.entry.cname,
                code.error_goto(arg.pos)))
        else:
            error(arg.pos,
                "Cannot convert argument of type '%s' to Python object"
                    % old_type)

    def generate_argument_type_tests(self, code):
        # Generate type tests for args whose signature
        # type is PyObject * and whose declared type is
        # a subtype thereof.
        for arg in self.args:
            if arg.needs_type_test:
                self.generate_arg_type_test(arg, code)

    def generate_arg_type_test(self, arg, code):
        # Generate type test for one argument.
        if arg.type.typeobj_is_available():
            typeptr_cname = arg.type.typeptr_cname
            arg_code = "((PyObject *)%s)" % arg.entry.cname
            code.putln(
                'if (!__Pyx_ArgTypeTest(%s, %s, %d, "%s")) %s' % (
                    arg_code,
                    typeptr_cname,
                    not arg.not_none,
                    arg.name,
                    code.error_goto(arg.pos)))
        else:
            error(arg.pos, "Cannot test type of extern C class "
                "without type object name specification")

    def generate_execution_code(self, code):
        # Evaluate and store argument default values
        for arg in self.args:
            default = arg.default
            if default:
                default.generate_evaluation_code(code)
                default.make_owned_reference(code)
                code.putln(
                    "%s = %s;" % (
                        arg.default_entry.cname,
                        default.result_as(arg.default_entry.type)))
                if default.is_temp and default.type.is_pyobject:
                    # Ownership has been transferred to the stored
                    # default; clear the temp so it isn't decrefed.
                    code.putln(
                        "%s = 0;" %
                            default.result_code)
        # For Python class methods, create and store function object
        if self.assmt:
            self.assmt.generate_execution_code(code)

    def error_value(self):
        return self.entry.signature.error_value

    def caller_will_check_exceptions(self):
        # Python-callable functions always report errors to a caller
        # that checks for them.
        return 1
+
+
class PyClassDefNode(StatNode, BlockNode):
    # A Python class definition.
    #
    # name    string         Name of the class
    # doc     string or None
    # body    StatNode       Attribute definition code
    # entry   Symtab.Entry
    # scope   PyClassScope
    #
    # The following subnodes are constructed internally:
    #
    # dict      DictNode   Class dictionary
    # classobj  ClassNode  Class object
    # target    NameNode   Variable to assign class object to

    def __init__(self, pos, name, bases, doc, body):
        StatNode.__init__(self, pos)
        self.name = name
        self.doc = doc
        self.body = body
        import ExprNodes
        # Synthesize the nodes for the class dictionary, the class
        # object built from it, and the name the class is bound to.
        self.dict = ExprNodes.DictNode(pos, key_value_pairs = [])
        if self.doc:
            doc_node = ExprNodes.StringNode(pos, value = self.doc)
        else:
            doc_node = None
        self.classobj = ExprNodes.ClassNode(pos,
            name = ExprNodes.StringNode(pos, value = name),
            bases = bases, dict = self.dict, doc = doc_node)
        self.target = ExprNodes.NameNode(pos, name = name)

    def analyse_declarations(self, env):
        self.target.analyse_target_declaration(env)

    def analyse_expressions(self, env):
        # Analyse the synthesized nodes in the enclosing scope, then
        # the class body in a new PyClassScope whose dict/object
        # cnames come from the synthesized expressions.
        self.dict.analyse_expressions(env)
        self.classobj.analyse_expressions(env)
        genv = env.global_scope()
        cenv = PyClassScope(name = self.name, outer_scope = genv)
        cenv.class_dict_cname = self.dict.result_code
        cenv.class_obj_cname = self.classobj.result_code
        self.scope = cenv
        self.body.analyse_declarations(cenv)
        self.body.analyse_expressions(cenv)
        self.target.analyse_target_expression(env)
        self.dict.release_temp(env)
        self.classobj.release_temp(env)
        self.target.release_target_temp(env)
        #env.recycle_pending_temps()

    def generate_function_definitions(self, env, code):
        self.generate_py_string_decls(self.scope, code)
        self.body.generate_function_definitions(
            self.scope, code)

    def generate_execution_code(self, code):
        # Build the class dict, execute the body to populate it,
        # create the class object and bind it to the target name.
        self.dict.generate_evaluation_code(code)
        self.classobj.generate_evaluation_code(code)
        self.body.generate_execution_code(code)
        self.target.generate_assignment_code(self.classobj, code)
        self.dict.generate_disposal_code(code)
+
+
class CClassDefNode(StatNode):
    # An extension type definition.
    #
    # visibility         'private' or 'public' or 'extern'
    # typedef_flag       boolean
    # module_name        string or None   For import of extern type objects
    # class_name         string           Unqualified name of class
    # as_name            string or None   Name to declare as in this scope
    # base_class_module  string or None   Module containing the base class
    # base_class_name    string or None   Name of the base class
    # objstruct_name     string or None   Specified C name of object struct
    # typeobj_name       string or None   Specified C name of type object
    # in_pxd             boolean          Is in a .pxd file
    # doc                string or None
    # body               StatNode or None
    # entry              Symtab.Entry
    # base_type          PyExtensionType or None

    def analyse_declarations(self, env):
        # Resolve the base class (if any), declare the extension type
        # in the enclosing scope, then analyse the class body in the
        # type's own scope.
        #print "CClassDefNode.analyse_declarations:", self.class_name
        #print "...visibility =", self.visibility
        #print "...module_name =", self.module_name
        if env.in_cinclude and not self.objstruct_name:
            error(self.pos, "Object struct name specification required for "
                "C class defined in 'extern from' block")
        self.base_type = None
        if self.base_class_name:
            if self.base_class_module:
                base_class_scope = env.find_module(self.base_class_module, self.pos)
            else:
                base_class_scope = env
            if base_class_scope:
                base_class_entry = base_class_scope.find(self.base_class_name, self.pos)
                if base_class_entry:
                    # The base must be a complete extension type.
                    if not base_class_entry.is_type:
                        error(self.pos, "'%s' is not a type name" % self.base_class_name)
                    elif not base_class_entry.type.is_extension_type:
                        error(self.pos, "'%s' is not an extension type" % self.base_class_name)
                    elif not base_class_entry.type.is_complete():
                        error(self.pos, "Base class '%s' is incomplete" % self.base_class_name)
                    else:
                        self.base_type = base_class_entry.type
        has_body = self.body is not None
        self.entry = env.declare_c_class(
            name = self.class_name,
            pos = self.pos,
            defining = has_body and self.in_pxd,
            implementing = has_body and not self.in_pxd,
            module_name = self.module_name,
            base_type = self.base_type,
            objstruct_cname = self.objstruct_name,
            typeobj_cname = self.typeobj_name,
            visibility = self.visibility,
            typedef_flag = self.typedef_flag)
        scope = self.entry.type.scope
        if self.doc:
            scope.doc = self.doc
        if has_body:
            self.body.analyse_declarations(scope)
            if self.in_pxd:
                scope.defined = 1
            else:
                scope.implemented = 1
        env.allocate_vtable_names(self.entry)

    def analyse_expressions(self, env):
        if self.body:
            self.body.analyse_expressions(env)

    def generate_function_definitions(self, env, code):
        if self.body:
            self.body.generate_function_definitions(
                self.entry.type.scope, code)

    def generate_execution_code(self, code):
        # This is needed to generate evaluation code for
        # default values of method arguments.
        if self.body:
            self.body.generate_execution_code(code)
+
+
class PropertyNode(StatNode):
    # Definition of a property in an extension type.
    #
    # name  string
    # doc   string or None   Doc string
    # body  StatListNode

    def analyse_declarations(self, env):
        # Declare the property; on success, attach its doc string
        # constant and analyse the accessor bodies in its scope.
        entry = env.declare_property(self.name, self.doc, self.pos)
        if not entry:
            return
        if self.doc:
            entry.doc_cname = env.get_string_const(self.doc).cname
        self.body.analyse_declarations(entry.scope)

    def analyse_expressions(self, env):
        self.body.analyse_expressions(env)

    def generate_function_definitions(self, env, code):
        self.body.generate_function_definitions(env, code)

    def generate_execution_code(self, code):
        pass
+
+
class GlobalNode(StatNode):
    # Global variable declaration.
    #
    # names  [string]

    def analyse_declarations(self, env):
        # Mark each listed name as referring to the module scope.
        for global_name in self.names:
            env.declare_global(global_name, self.pos)

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        pass
+
+
class ExprStatNode(StatNode):
    # Expression used as a statement.
    #
    # expr  ExprNode

    def analyse_expressions(self, env):
        self.expr.analyse_expressions(env)
        self.expr.release_temp(env)

    def generate_execution_code(self, code):
        # Evaluate the expression for its side effects; a non-temp
        # result is still emitted as a C expression-statement so its
        # C-level side effects take place.
        self.expr.generate_evaluation_code(code)
        result = self.expr.result_code
        if result and not self.expr.is_temp:
            code.putln("%s;" % result)
        self.expr.generate_disposal_code(code)
+
+
class AssignmentNode(StatNode):
    # Abstract base class for assignment nodes.
    #
    # The analyse_expressions and generate_execution_code
    # phases of assignments are split into two sub-phases
    # each, to enable all the right hand sides of a
    # parallel assignment to be evaluated before assigning
    # to any of the left hand sides.

    def analyse_expressions(self, env):
        # Default for a stand-alone assignment: run both sub-phases
        # back to back.
        self.analyse_expressions_1(env)
        self.analyse_expressions_2(env)

    def generate_execution_code(self, code):
        # Default: evaluate the RHS(s), then perform the assignment(s).
        self.generate_rhs_evaluation_code(code)
        self.generate_assignment_code(code)
+
+
class SingleAssignmentNode(AssignmentNode):
    # The simplest case:
    #
    #   a = b
    #
    # lhs  ExprNode  Left hand side
    # rhs  ExprNode  Right hand side

    def analyse_declarations(self, env):
        self.lhs.analyse_target_declaration(env)

    def analyse_expressions_1(self, env, use_temp = 0):
        # Phase 1: type both sides, coerce the RHS to the LHS type
        # and allocate its temps (optionally forcing it into a temp
        # so a parallel assignment can evaluate all RHSs first).
        self.rhs.analyse_types(env)
        self.lhs.analyse_target_types(env)
        coerced = self.rhs.coerce_to(self.lhs.type, env)
        if use_temp:
            coerced = coerced.coerce_to_temp(env)
        self.rhs = coerced
        self.rhs.allocate_temps(env)

    def analyse_expressions_2(self, env):
        # Phase 2: manage target and RHS temps.
        self.lhs.allocate_target_temps(env)
        self.lhs.release_target_temp(env)
        self.rhs.release_temp(env)

    def generate_rhs_evaluation_code(self, code):
        self.rhs.generate_evaluation_code(code)

    def generate_assignment_code(self, code):
        self.lhs.generate_assignment_code(self.rhs, code)
+
+
class CascadedAssignmentNode(AssignmentNode):
    # An assignment with multiple left hand sides:
    #
    #   a = b = c
    #
    # lhs_list  [ExprNode]  Left hand sides
    # rhs       ExprNode    Right hand side
    #
    # Used internally:
    #
    # coerced_rhs_list  [ExprNode]  RHS coerced to type of each LHS

    def analyse_declarations(self, env):
        for lhs in self.lhs_list:
            lhs.analyse_target_declaration(env)

    def analyse_expressions_1(self, env, use_temp = 0):
        # Phase 1: analyse the RHS and get it into a simple (or temp)
        # form so it can safely be cloned once per LHS.
        self.rhs.analyse_types(env)
        if use_temp:
            self.rhs = self.rhs.coerce_to_temp(env)
        else:
            self.rhs = self.rhs.coerce_to_simple(env)
        self.rhs.allocate_temps(env)

    def analyse_expressions_2(self, env):
        # Phase 2: for each LHS, coerce a clone of the shared RHS to
        # that target's type and manage the temps involved.
        from ExprNodes import CloneNode
        self.coerced_rhs_list = []
        for lhs in self.lhs_list:
            lhs.analyse_target_types(env)
            rhs = CloneNode(self.rhs)
            rhs = rhs.coerce_to(lhs.type, env)
            self.coerced_rhs_list.append(rhs)
            rhs.allocate_temps(env)
            lhs.allocate_target_temps(env)
            lhs.release_target_temp(env)
            rhs.release_temp(env)
        self.rhs.release_temp(env)

    def generate_rhs_evaluation_code(self, code):
        self.rhs.generate_evaluation_code(code)

    def generate_assignment_code(self, code):
        # Each coerced clone is evaluated and consumed by its own
        # assignment; the shared RHS is disposed of at the end.
        for i in range(len(self.lhs_list)):
            lhs = self.lhs_list[i]
            rhs = self.coerced_rhs_list[i]
            rhs.generate_evaluation_code(code)
            lhs.generate_assignment_code(rhs, code)
            # Assignment has disposed of the cloned RHS
        self.rhs.generate_disposal_code(code)
+
class ParallelAssignmentNode(AssignmentNode):
    # A combined packing/unpacking assignment:
    #
    #    a, b, c = d, e, f
    #
    # This has been rearranged by the parser into
    #
    #    a = d ; b = e ; c = f
    #
    # but we must evaluate all the right hand sides
    # before assigning to any of the left hand sides.
    #
    # stats   [AssignmentNode]   The constituent assignments

    def analyse_declarations(self, env):
        # Declare the target of every constituent assignment.
        for assignment in self.stats:
            assignment.analyse_declarations(env)

    def analyse_expressions(self, env):
        # Run phase 1 of every constituent assignment (forcing each RHS
        # into a temp) before any phase 2, so that all right hand sides
        # are evaluated before any left hand side is assigned.
        for assignment in self.stats:
            assignment.analyse_expressions_1(env, use_temp = 1)
        for assignment in self.stats:
            assignment.analyse_expressions_2(env)

    def generate_execution_code(self, code):
        # Emit all RHS evaluations first, then all assignments.
        for assignment in self.stats:
            assignment.generate_rhs_evaluation_code(code)
        for assignment in self.stats:
            assignment.generate_assignment_code(code)
+
+
class PrintStatNode(StatNode):
    # print statement
    #
    # args             [ExprNode]
    # ends_with_comma  boolean

    def analyse_expressions(self, env):
        # Each argument is coerced to a Python object; the coerced node
        # replaces the original entry in self.args.
        for i, arg in enumerate(self.args):
            arg.analyse_types(env)
            arg = arg.coerce_to_pyobject(env)
            arg.allocate_temps(env)
            arg.release_temp(env)
            self.args[i] = arg
        env.use_utility_code(printing_utility_code)

    def generate_execution_code(self, code):
        for arg in self.args:
            arg.generate_evaluation_code(code)
            code.putln(
                "if (__Pyx_PrintItem(%s) < 0) %s" % (
                    arg.py_result(), code.error_goto(self.pos)))
            arg.generate_disposal_code(code)
        if not self.ends_with_comma:
            # A trailing comma suppresses the newline, as in Python.
            code.putln(
                "if (__Pyx_PrintNewline() < 0) %s" %
                    code.error_goto(self.pos))
+
+
class DelStatNode(StatNode):
    # del statement
    #
    # args   [ExprNode]

    def analyse_declarations(self, env):
        for target in self.args:
            target.analyse_target_declaration(env)

    def analyse_expressions(self, env):
        # Deletion is only supported for Python objects; anything else
        # is reported here and skipped during code generation.
        for target in self.args:
            target.analyse_target_expression(env)
            if not target.type.is_pyobject:
                error(target.pos, "Deletion of non-Python object")

    def generate_execution_code(self, code):
        for target in self.args:
            # Non-Python objects were reported in analyse_expressions.
            if target.type.is_pyobject:
                target.generate_deletion_code(code)
+
+
class PassStatNode(StatNode):
    # pass statement -- a no-op in both analysis and code generation.

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        pass
+
+
class BreakStatNode(StatNode):
    # break statement -- compiled to a goto to the current break label.

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        # Outside a loop there is no break label to jump to.
        if code.break_label:
            code.putln("goto %s;" % code.break_label)
        else:
            error(self.pos, "break statement not inside loop")
+
+
class ContinueStatNode(StatNode):
    # continue statement -- compiled to a goto to the current continue label.

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        # Rejected inside the try part of a try...finally (when that
        # check is enabled), and outside any loop.
        if code.in_try_finally:
            error(self.pos, "continue statement inside try of try...finally")
            return
        if not code.continue_label:
            error(self.pos, "continue statement not inside loop")
            return
        code.putln("goto %s;" % code.continue_label)
+
+
class ReturnStatNode(StatNode):
    # return statement
    #
    # value         ExprNode or None
    # return_type   PyrexType
    # temps_in_use  [Entry]   Temps in use at time of return

    def analyse_expressions(self, env):
        # env.return_type is falsy outside a function body.
        return_type = env.return_type
        self.return_type = return_type
        # Snapshot the live temps: they must be decref'd and cleared
        # before leaving the function through the return label.
        self.temps_in_use = env.temps_in_use()
        if not return_type:
            error(self.pos, "Return not inside a function body")
            return
        if self.value:
            self.value.analyse_types(env)
            if return_type.is_void or return_type.is_returncode:
                error(self.value.pos,
                    "Return with value in void function")
            else:
                self.value = self.value.coerce_to(env.return_type, env)
                self.value.allocate_temps(env)
                self.value.release_temp(env)
        else:
            # A bare "return" is only allowed when the return type has a
            # sensible default: void, Python object, or error return code.
            if (not return_type.is_void
                and not return_type.is_pyobject
                and not return_type.is_returncode):
                    error(self.pos, "Return value required")

    def generate_execution_code(self, code):
        if not self.return_type:
            # error reported earlier
            return
        # Drop references held by temps that are live at this point.
        for entry in self.temps_in_use:
            code.put_var_decref_clear(entry)
        if self.value:
            self.value.generate_evaluation_code(code)
            self.value.make_owned_reference(code)
            code.putln(
                "%s = %s;" % (
                    Naming.retval_cname,
                    self.value.result_as(self.return_type)))
            self.value.generate_post_assignment_code(code)
        else:
            # No explicit value: default the return variable.
            if self.return_type.is_pyobject:
                code.put_init_to_py_none(Naming.retval_cname, self.return_type)
            elif self.return_type.is_returncode:
                code.putln(
                    "%s = %s;" % (
                        Naming.retval_cname,
                        self.return_type.default_value))
        code.putln(
            "goto %s;" %
                code.return_label)
+
+
class RaiseStatNode(StatNode):
    # raise statement
    #
    # exc_type   ExprNode or None   exception type
    # exc_value  ExprNode or None   exception value
    # exc_tb     ExprNode or None   traceback object

    def analyse_expressions(self, env):
        # Coerce each operand that is present to a Python object.
        # All temps are allocated before any are released, matching the
        # order in which the values are evaluated at run time.
        if self.exc_type:
            self.exc_type.analyse_types(env)
            self.exc_type = self.exc_type.coerce_to_pyobject(env)
            self.exc_type.allocate_temps(env)
        if self.exc_value:
            self.exc_value.analyse_types(env)
            self.exc_value = self.exc_value.coerce_to_pyobject(env)
            self.exc_value.allocate_temps(env)
        if self.exc_tb:
            self.exc_tb.analyse_types(env)
            self.exc_tb = self.exc_tb.coerce_to_pyobject(env)
            self.exc_tb.allocate_temps(env)
        if self.exc_type:
            self.exc_type.release_temp(env)
        if self.exc_value:
            self.exc_value.release_temp(env)
        if self.exc_tb:
            self.exc_tb.release_temp(env)
        # A bare "raise" re-raises the current exception.
        if not (self.exc_type or self.exc_value or self.exc_tb):
            env.use_utility_code(reraise_utility_code)
        else:
            env.use_utility_code(raise_utility_code)

    def generate_execution_code(self, code):
        # Missing operands are passed to __Pyx_Raise as C null pointers.
        # (Each *_code variable is a C expression string.)
        if self.exc_type:
            self.exc_type.generate_evaluation_code(code)
            type_code = self.exc_type.py_result()
        else:
            type_code = "0"
        if self.exc_value:
            self.exc_value.generate_evaluation_code(code)
            value_code = self.exc_value.py_result()
        else:
            value_code = "0"
        if self.exc_tb:
            self.exc_tb.generate_evaluation_code(code)
            tb_code = self.exc_tb.py_result()
        else:
            tb_code = "0"
        if self.exc_type or self.exc_value or self.exc_tb:
            code.putln(
                "__Pyx_Raise(%s, %s, %s);" % (
                    type_code,
                    value_code,
                    tb_code))
        else:
            code.putln(
                "__Pyx_ReRaise();")
        if self.exc_type:
            self.exc_type.generate_disposal_code(code)
        if self.exc_value:
            self.exc_value.generate_disposal_code(code)
        if self.exc_tb:
            self.exc_tb.generate_disposal_code(code)
        # A raise always ends by jumping to the error handler.
        code.putln(
            code.error_goto(self.pos))
+
+
class AssertStatNode(StatNode):
    # assert statement
    #
    # cond    ExprNode           condition to test
    # value   ExprNode or None   optional message object

    def analyse_expressions(self, env):
        self.cond = self.cond.analyse_boolean_expression(env)
        if self.value:
            self.value.analyse_types(env)
            self.value = self.value.coerce_to_pyobject(env)
            self.value.allocate_temps(env)
        self.cond.release_temp(env)
        if self.value:
            self.value.release_temp(env)
        #env.recycle_pending_temps() # TEMPORARY

    def generate_execution_code(self, code):
        # NOTE(review): the assertion is always compiled in; there is no
        # equivalent of Python's -O stripping here -- confirm intended.
        self.cond.generate_evaluation_code(code)
        if self.value:
            self.value.generate_evaluation_code(code)
        code.putln(
            "if (!%s) {" %
                self.cond.result_code)
        if self.value:
            code.putln(
                "PyErr_SetObject(PyExc_AssertionError, %s);" %
                    self.value.py_result())
        else:
            code.putln(
                "PyErr_SetNone(PyExc_AssertionError);")
        code.putln(
            code.error_goto(self.pos))
        code.putln(
            "}")
        self.cond.generate_disposal_code(code)
        if self.value:
            self.value.generate_disposal_code(code)
+
+
class IfStatNode(StatNode):
    # if statement
    #
    # if_clauses    [IfClauseNode]
    # else_clause   StatNode or None

    def analyse_declarations(self, env):
        for clause in self.if_clauses:
            clause.analyse_declarations(env)
        if self.else_clause:
            self.else_clause.analyse_declarations(env)

    def analyse_expressions(self, env):
        for clause in self.if_clauses:
            clause.analyse_expressions(env)
        if self.else_clause:
            self.else_clause.analyse_expressions(env)

    def generate_execution_code(self, code):
        # Each clause jumps to end_label when its body completes.
        end_label = code.new_label()
        for clause in self.if_clauses:
            clause.generate_execution_code(code, end_label)
        if self.else_clause:
            code.putln("/*else*/ {")
            self.else_clause.generate_execution_code(code)
            code.putln("}")
        code.put_label(end_label)
+
+
class IfClauseNode(Node):
    # if or elif clause in an if statement
    #
    # condition   ExprNode
    # body        StatNode

    def analyse_declarations(self, env):
        self.condition.analyse_declarations(env)
        self.body.analyse_declarations(env)

    def analyse_expressions(self, env):
        # Coerce the condition to a temp boolean expression.
        self.condition = self.condition.analyse_temp_boolean_expression(env)
        self.condition.release_temp(env)
        self.body.analyse_expressions(env)

    def generate_execution_code(self, code, end_label):
        self.condition.generate_evaluation_code(code)
        code.putln("if (%s) {" % self.condition.result_code)
        self.body.generate_execution_code(code)
        # Skip any remaining clauses once this body has run.
        code.putln("goto %s;" % end_label)
        code.putln("}")
+
+
class WhileStatNode(StatNode):
    # while statement
    #
    # condition     ExprNode
    # body          StatNode
    # else_clause   StatNode

    def analyse_declarations(self, env):
        self.body.analyse_declarations(env)
        if self.else_clause:
            self.else_clause.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.condition = self.condition.analyse_temp_boolean_expression(env)
        self.condition.release_temp(env)
        self.body.analyse_expressions(env)
        if self.else_clause:
            self.else_clause.analyse_expressions(env)

    def generate_execution_code(self, code):
        # Compiled as an infinite C loop with the condition tested at
        # the top; the continue label sits just before the test.
        outer_labels = code.new_loop_labels()
        code.putln("while (1) {")
        code.put_label(code.continue_label)
        self.condition.generate_evaluation_code(code)
        code.putln("if (!%s) break;" % self.condition.result_code)
        self.body.generate_execution_code(code)
        code.putln("}")
        loop_break_label = code.break_label
        code.set_loop_labels(outer_labels)
        if self.else_clause:
            # The else clause runs on normal exit; break jumps past it.
            code.putln("/*else*/ {")
            self.else_clause.generate_execution_code(code)
            code.putln("}")
        code.put_label(loop_break_label)
+
+
class ForInStatNode(StatNode):
    # for ... in ... statement
    #
    # target        ExprNode
    # iterator      IteratorNode
    # body          StatNode
    # else_clause   StatNode
    # item          NextNode   used internally

    def analyse_declarations(self, env):
        self.target.analyse_target_declaration(env)
        self.body.analyse_declarations(env)
        if self.else_clause:
            self.else_clause.analyse_declarations(env)

    def analyse_expressions(self, env):
        import ExprNodes
        self.iterator.analyse_expressions(env)
        self.target.analyse_target_types(env)
        # self.item yields successive values from the iterator; it is
        # coerced to the target's type and assigned each time round the
        # generated loop.
        self.item = ExprNodes.NextNode(self.iterator, env)
        self.item = self.item.coerce_to(self.target.type, env)
        self.item.allocate_temps(env)
        self.target.allocate_target_temps(env)
        self.item.release_temp(env)
        self.target.release_target_temp(env)
        #env.recycle_pending_temps() # TEMPORARY
        self.body.analyse_expressions(env)
        #env.recycle_pending_temps() # TEMPORARY
        if self.else_clause:
            self.else_clause.analyse_expressions(env)
        # The iterator temp stays live for the whole loop, including
        # the else clause; it is only released here.
        self.iterator.release_temp(env)

    def generate_execution_code(self, code):
        old_loop_labels = code.new_loop_labels()
        self.iterator.generate_evaluation_code(code)
        code.putln(
            "for (;;) {")
        code.put_label(code.continue_label)
        self.item.generate_evaluation_code(code)
        self.target.generate_assignment_code(self.item, code)
        self.body.generate_execution_code(code)
        code.putln(
            "}")
        break_label = code.break_label
        code.set_loop_labels(old_loop_labels)
        if self.else_clause:
            code.putln("/*else*/ {")
            self.else_clause.generate_execution_code(code)
            code.putln("}")
        code.put_label(break_label)
        self.iterator.generate_disposal_code(code)
+
+
class ForFromStatNode(StatNode):
    # for name from expr rel name rel expr
    #
    # target        NameNode
    # bound1        ExprNode
    # relation1     string
    # relation2     string
    # bound2        ExprNode
    # body          StatNode
    # else_clause   StatNode or None
    #
    # Used internally:
    #
    # loopvar_name      string             C name of the loop counter
    # py_loopvar_node   PyTempNode or None counter as a Python object,
    #                                      for non-int targets

    def analyse_declarations(self, env):
        self.target.analyse_target_declaration(env)
        self.body.analyse_declarations(env)
        if self.else_clause:
            self.else_clause.analyse_declarations(env)

    def analyse_expressions(self, env):
        import ExprNodes
        self.target.analyse_target_types(env)
        self.bound1.analyse_types(env)
        self.bound2.analyse_types(env)
        self.bound1 = self.bound1.coerce_to_integer(env)
        self.bound2 = self.bound2.coerce_to_integer(env)
        # bound2 appears in the C loop condition and is re-evaluated on
        # every iteration, so force it into a temp unless it is already
        # trivially re-usable (a name or a literal).
        if not (self.bound2.is_name or self.bound2.is_literal):
            self.bound2 = self.bound2.coerce_to_temp(env)
        target_type = self.target.type
        if not (target_type.is_pyobject
            or target_type.assignable_from(PyrexTypes.c_int_type)):
                error(self.target.pos,
                    "Cannot assign integer to variable of type '%s'" % target_type)
        if target_type.is_int:
            # An int target serves directly as the C loop counter.
            self.loopvar_name = self.target.entry.cname
            self.py_loopvar_node = None
        else:
            # Otherwise loop over a hidden C long temp and assign its
            # value, as a Python object, to the target each iteration.
            c_loopvar_node = ExprNodes.TempNode(self.pos,
                PyrexTypes.c_long_type, env)
            c_loopvar_node.allocate_temps(env)
            self.loopvar_name = c_loopvar_node.result_code
            self.py_loopvar_node = \
                ExprNodes.CloneNode(c_loopvar_node).coerce_to_pyobject(env)
        self.bound1.allocate_temps(env)
        self.bound2.allocate_temps(env)
        if self.py_loopvar_node:
            self.py_loopvar_node.allocate_temps(env)
        self.target.allocate_target_temps(env)
        self.target.release_target_temp(env)
        if self.py_loopvar_node:
            self.py_loopvar_node.release_temp(env)
        self.body.analyse_expressions(env)
        if self.py_loopvar_node:
            c_loopvar_node.release_temp(env)
        if self.else_clause:
            self.else_clause.analyse_expressions(env)
        self.bound1.release_temp(env)
        self.bound2.release_temp(env)
        #env.recycle_pending_temps() # TEMPORARY

    def generate_execution_code(self, code):
        old_loop_labels = code.new_loop_labels()
        self.bound1.generate_evaluation_code(code)
        self.bound2.generate_evaluation_code(code)
        # relation1 determines the initial offset and count direction,
        # relation2 the continuation test (see relation_table below).
        offset, incop = self.relation_table[self.relation1]
        code.putln(
            "for (%s = %s%s; %s %s %s; %s%s) {" % (
                self.loopvar_name,
                self.bound1.result_code, offset,
                self.loopvar_name, self.relation2, self.bound2.result_code,
                incop, self.loopvar_name))
        if self.py_loopvar_node:
            self.py_loopvar_node.generate_evaluation_code(code)
            self.target.generate_assignment_code(self.py_loopvar_node, code)
        self.body.generate_execution_code(code)
        code.put_label(code.continue_label)
        code.putln("}")
        break_label = code.break_label
        code.set_loop_labels(old_loop_labels)
        if self.else_clause:
            code.putln("/*else*/ {")
            self.else_clause.generate_execution_code(code)
            code.putln("}")
        code.put_label(break_label)
        self.bound1.generate_disposal_code(code)
        self.bound2.generate_disposal_code(code)

    relation_table = {
        # {relop : (initial offset, increment op)}
        '<=': ("", "++"),
        '<' : ("+1", "++"),
        '>=': ("", "--"),
        '>' : ("-1", "--")
    }
+
+
class TryExceptStatNode(StatNode):
    # try .. except statement
    #
    # body             StatNode
    # except_clauses   [ExceptClauseNode]
    # else_clause      StatNode or None
    # cleanup_list     [Entry]   temps to clean up on error

    def analyse_declarations(self, env):
        self.body.analyse_declarations(env)
        for except_clause in self.except_clauses:
            except_clause.analyse_declarations(env)
        if self.else_clause:
            self.else_clause.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.body.analyse_expressions(env)
        # Temps recorded here are xdecref'd and cleared on the error
        # path before the except clauses run.
        self.cleanup_list = env.free_temp_entries[:]
        for except_clause in self.except_clauses:
            except_clause.analyse_expressions(env)
        if self.else_clause:
            self.else_clause.analyse_expressions(env)

    def generate_execution_code(self, code):
        # Errors inside the try body jump to our_error_label; the else
        # clause and the except bodies use the enclosing error label.
        old_error_label = code.new_error_label()
        our_error_label = code.error_label
        end_label = code.new_label()
        code.putln(
            "/*try:*/ {")
        self.body.generate_execution_code(code)
        code.putln(
            "}")
        code.error_label = old_error_label
        if self.else_clause:
            code.putln(
                "/*else:*/ {")
            self.else_clause.generate_execution_code(code)
            code.putln(
                "}")
        code.putln(
            "goto %s;" %
                end_label)
        code.put_label(our_error_label)
        code.put_var_xdecrefs_clear(self.cleanup_list)
        default_clause_seen = 0
        for except_clause in self.except_clauses:
            # A bare "except:" (no pattern) must come last.
            if not except_clause.pattern:
                default_clause_seen = 1
            else:
                if default_clause_seen:
                    error(except_clause.pos, "Default except clause not last")
            except_clause.generate_handling_code(code, end_label)
        if not default_clause_seen:
            # No clause matched: propagate to the enclosing handler.
            code.putln(
                "goto %s;" %
                    code.error_label)
        code.put_label(end_label)
+
+
class ExceptClauseNode(Node):
    # Part of try ... except statement.
    #
    # pattern         ExprNode or None   exception type to match
    # target          ExprNode or None   target for the exception value
    # body            StatNode
    # match_flag      string             C name of the match-result temp
    # exc_value       ExcValueNode       used internally
    # function_name   string             qualified name of enclosing function

    def analyse_declarations(self, env):
        if self.target:
            self.target.analyse_target_declaration(env)
        self.body.analyse_declarations(env)

    def analyse_expressions(self, env):
        import ExprNodes
        genv = env.global_scope()  # NOTE(review): genv is unused here
        self.function_name = env.qualified_name
        if self.pattern:
            self.pattern.analyse_expressions(env)
            self.pattern = self.pattern.coerce_to_pyobject(env)
            # Int temp holding the result of PyErr_ExceptionMatches.
            self.match_flag = env.allocate_temp(PyrexTypes.c_int_type)
            self.pattern.release_temp(env)
            env.release_temp(self.match_flag)
        self.exc_value = ExprNodes.ExcValueNode(self.pos, env)
        self.exc_value.allocate_temps(env)
        if self.target:
            self.target.analyse_target_expression(env)
        self.exc_value.release_temp(env)
        if self.target:
            self.target.release_target_temp(env)
        #env.recycle_pending_temps() # TEMPORARY
        self.body.analyse_expressions(env)

    def generate_handling_code(self, code, end_label):
        code.mark_pos(self.pos)
        if self.pattern:
            self.pattern.generate_evaluation_code(code)
            code.putln(
                "%s = PyErr_ExceptionMatches(%s);" % (
                    self.match_flag,
                    self.pattern.py_result()))
            self.pattern.generate_disposal_code(code)
            code.putln(
                "if (%s) {" %
                    self.match_flag)
        else:
            # A bare "except:" matches anything.
            code.putln(
                "/*except:*/ {")
        code.putln(
            '__Pyx_AddTraceback("%s");' % (self.function_name))
        # We always have to fetch the exception value even if
        # there is no target, because this also normalises the
        # exception and stores it in the thread state.
        self.exc_value.generate_evaluation_code(code)
        if self.target:
            self.target.generate_assignment_code(self.exc_value, code)
        else:
            self.exc_value.generate_disposal_code(code)
        self.body.generate_execution_code(code)
        code.putln(
            "goto %s;"
            % end_label)
        code.putln(
            "}")
+
+
class TryFinallyStatNode(StatNode):
    # try ... finally statement
    #
    # body             StatNode
    # finally_clause   StatNode
    # cleanup_list     [Entry]       temps to clean up on error
    # exc_vars         3*(string,)   temps to hold saved exception
    #
    # The plan is that we funnel all continue, break
    # return and error gotos into the beginning of the
    # finally block, setting a variable to remember which
    # one we're doing. At the end of the finally block, we
    # switch on the variable to figure out where to go.
    # In addition, if we're doing an error, we save the
    # exception on entry to the finally block and restore
    # it on exit.

    disallow_continue_in_try_finally = 0
    # There doesn't seem to be any point in disallowing
    # continue in the try block, since we have no problem
    # handling it.

    def analyse_declarations(self, env):
        self.body.analyse_declarations(env)
        self.finally_clause.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.body.analyse_expressions(env)
        self.cleanup_list = env.free_temp_entries[:]
        # Three object temps to save the exception (type, value, tb)
        # plus an int temp to save the source line number.
        self.exc_vars = (
            env.allocate_temp(PyrexTypes.py_object_type),
            env.allocate_temp(PyrexTypes.py_object_type),
            env.allocate_temp(PyrexTypes.py_object_type))
        self.lineno_var = \
            env.allocate_temp(PyrexTypes.c_int_type)
        self.finally_clause.analyse_expressions(env)
        for var in self.exc_vars:
            env.release_temp(var)
        # NOTE(review): self.lineno_var is never released -- confirm
        # whether that is intentional.

    def generate_execution_code(self, code):
        old_error_label = code.error_label
        old_labels = code.all_new_labels()
        new_labels = code.get_all_labels()
        new_error_label = code.error_label
        catch_label = code.new_label()
        code.putln(
            "/*try:*/ {")
        if self.disallow_continue_in_try_finally:
            was_in_try_finally = code.in_try_finally
            code.in_try_finally = 1
        self.body.generate_execution_code(code)
        if self.disallow_continue_in_try_finally:
            code.in_try_finally = was_in_try_finally
        code.putln(
            "}")
        code.putln(
            "/*finally:*/ {")
        code.putln(
            "int __pyx_why;")
        code.putln(
            "__pyx_why = 0; goto %s;" %
                catch_label)
        # Redirect every goto target used inside the body: each sets
        # __pyx_why to its 1-based label index and enters the finally
        # block.
        for i, new_label in enumerate(new_labels):
            if new_label and new_label != "<try>":
                if new_label == new_error_label:
                    # The error path also saves the pending exception.
                    self.put_error_catcher(code,
                        new_error_label, i+1, catch_label)
                else:
                    code.putln(
                        "%s: __pyx_why = %s; goto %s;" % (
                            new_label,
                            i+1,
                            catch_label))
        code.put_label(catch_label)
        code.set_all_labels(old_labels)
        self.finally_clause.generate_execution_code(code)
        # After the finally clause, resume whatever was interrupted by
        # dispatching on __pyx_why.
        code.putln(
            "switch (__pyx_why) {")
        for i, old_label in enumerate(old_labels):
            if old_label:
                if old_label == old_error_label:
                    # The error path also restores the saved exception.
                    self.put_error_uncatcher(code, i+1, old_error_label)
                else:
                    code.putln(
                        "case %s: goto %s;" % (
                            i+1,
                            old_label))
        code.putln(
            "}")
        code.putln(
            "}")

    def put_error_catcher(self, code, error_label, i, catch_label):
        # Emit the error-label handler: record the reason, clear temps,
        # save the pending exception and line number, enter the finally.
        code.putln(
            "%s: {" %
                error_label)
        code.putln(
            "__pyx_why = %s;" %
                i)
        code.put_var_xdecrefs_clear(self.cleanup_list)
        code.putln(
            "PyErr_Fetch(&%s, &%s, &%s);" %
                self.exc_vars)
        code.putln(
            "%s = %s;" % (
                self.lineno_var, Naming.lineno_cname))
        code.putln(
            "goto %s;" %
                catch_label)
        code.putln(
            "}")

    def put_error_uncatcher(self, code, i, error_label):
        # Emit the switch case that restores the saved exception and
        # line number, then resumes error propagation.
        code.putln(
            "case %s: {" %
                i)
        code.putln(
            "PyErr_Restore(%s, %s, %s);" %
                self.exc_vars)
        code.putln(
            "%s = %s;" % (
                Naming.lineno_cname, self.lineno_var))
        for var in self.exc_vars:
            code.putln(
                "%s = 0;" %
                    var)
        code.putln(
            "goto %s;" %
                error_label)
        code.putln(
            "}")
+
+
class CImportStatNode(StatNode):
    # cimport statement
    #
    # module_name   string           Qualified name of module being imported
    # as_name       string or None   Name specified in "as" clause, if any

    def analyse_declarations(self, env):
        # Look up the scope of the module being cimported.
        module_scope = env.find_module(self.module_name, self.pos)
        if "." in self.module_name:
            # Declare each component module inside its parent, walking
            # down from the top-level package.
            names = self.module_name.split(".")
            top_name = names[0]
            top_module_scope = env.context.find_submodule(top_name)
            module_scope = top_module_scope
            for name in names[1:]:
                submodule_scope = module_scope.find_submodule(name)
                module_scope.declare_module(name, submodule_scope, self.pos)
                module_scope = submodule_scope
            if self.as_name:
                # "cimport a.b as c" binds c to the innermost module.
                env.declare_module(self.as_name, module_scope, self.pos)
            else:
                # "cimport a.b" binds only the top-level name a.
                env.declare_module(top_name, top_module_scope, self.pos)
        else:
            name = self.as_name or self.module_name
            env.declare_module(name, module_scope, self.pos)

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        # cimport is resolved entirely at compile time; no runtime code.
        pass
+
+
class FromCImportStatNode(StatNode):
    # from ... cimport statement
    #
    # module_name      string                   Qualified name of module
    # imported_names   [(pos, name, as_name)]   Names to be imported

    def analyse_declarations(self, env):
        module_scope = env.find_module(self.module_name, self.pos)
        env.add_imported_module(module_scope)
        for pos, name, as_name in self.imported_names:
            entry = module_scope.find(name, pos)
            if not entry:
                continue
            # Bind under the "as" name if one was given.
            env.add_imported_entry(as_name or name, entry, pos)

    def analyse_expressions(self, env):
        pass

    def generate_execution_code(self, code):
        pass
+
+
class FromImportStatNode(StatNode):
    # from ... import statement
    #
    # module           ImportNode
    # items            [(string, NameNode)]   name and assignment target
    # interned_items   [(string, NameNode)]   interned name cname and target
    # item             PyTempNode   used internally

    def analyse_declarations(self, env):
        for _, target in self.items:
            target.analyse_target_declaration(env)

    def analyse_expressions(self, env):
        import ExprNodes
        self.module.analyse_expressions(env)
        # Temp holding each attribute fetched from the module before it
        # is assigned to its target.
        self.item = ExprNodes.PyTempNode(self.pos, env)
        self.item.allocate_temp(env)
        self.interned_items = []
        for name, target in self.items:
            if Options.intern_names:
                self.interned_items.append((env.intern(name), target))
            target.analyse_target_expression(env)
            target.release_temp(env)
        self.module.release_temp(env)
        self.item.release_temp(env)
        #env.recycle_pending_temps() # TEMPORARY

    def generate_execution_code(self, code):
        self.module.generate_evaluation_code(code)
        if Options.intern_names:
            # Interned lookup: PyObject_GetAttr with a shared name object.
            for cname, target in self.interned_items:
                code.putln(
                    '%s = PyObject_GetAttr(%s, %s); if (!%s) %s' % (
                        self.item.result_code,
                        self.module.py_result(),
                        cname,
                        self.item.result_code,
                        code.error_goto(self.pos)))
                target.generate_assignment_code(self.item, code)
        else:
            # Non-interned lookup by C string literal.
            for name, target in self.items:
                code.putln(
                    '%s = PyObject_GetAttrString(%s, "%s"); if (!%s) %s' % (
                        self.item.result_code,
                        self.module.py_result(),
                        name,
                        self.item.result_code,
                        code.error_goto(self.pos)))
                target.generate_assignment_code(self.item, code)
        self.module.generate_disposal_code(code)
+
+#------------------------------------------------------------------------------------
+#
+# Runtime support code
+#
+#------------------------------------------------------------------------------------
+
+utility_function_predeclarations = \
+"""
+typedef struct {PyObject **p; char *s;} __Pyx_InternTabEntry; /*proto*/
+typedef struct {PyObject **p; char *s; long n;} __Pyx_StringTabEntry; /*proto*/
+static PyObject *__Pyx_UnpackItem(PyObject *, int); /*proto*/
+static int __Pyx_EndUnpack(PyObject *, int); /*proto*/
+static int __Pyx_PrintItem(PyObject *); /*proto*/
+static int __Pyx_PrintNewline(void); /*proto*/
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+static void __Pyx_ReRaise(void); /*proto*/
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list); /*proto*/
+static PyObject *__Pyx_GetExcValue(void); /*proto*/
+static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name); /*proto*/
+static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/
+static int __Pyx_GetStarArgs(PyObject **args, PyObject **kwds,\
+ char *kwd_list[], int nargs, PyObject **args2, PyObject **kwds2); /*proto*/
+static void __Pyx_WriteUnraisable(char *name); /*proto*/
+static void __Pyx_AddTraceback(char *funcname); /*proto*/
+static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size); /*proto*/
+static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/
+static int __Pyx_GetVtable(PyObject *dict, void *vtabptr); /*proto*/
+static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, char *modname); /*proto*/
+static int __Pyx_InternStrings(__Pyx_InternTabEntry *t); /*proto*/
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+"""
+
+get_name_predeclaration = \
+"static PyObject *__Pyx_GetName(PyObject *dict, char *name); /*proto*/"
+
+get_name_interned_predeclaration = \
+"static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/"
+
+#------------------------------------------------------------------------------------
+
+printing_utility_code = \
+r"""
+static PyObject *__Pyx_GetStdout(void) {
+ PyObject *f = PySys_GetObject("stdout");
+ if (!f) {
+ PyErr_SetString(PyExc_RuntimeError, "lost sys.stdout");
+ }
+ return f;
+}
+
+static int __Pyx_PrintItem(PyObject *v) {
+ PyObject *f;
+
+ if (!(f = __Pyx_GetStdout()))
+ return -1;
+ if (PyFile_SoftSpace(f, 1)) {
+ if (PyFile_WriteString(" ", f) < 0)
+ return -1;
+ }
+ if (PyFile_WriteObject(v, f, Py_PRINT_RAW) < 0)
+ return -1;
+ if (PyString_Check(v)) {
+ char *s = PyString_AsString(v);
+ int len = PyString_Size(v);
+ if (len > 0 &&
+ isspace(Py_CHARMASK(s[len-1])) &&
+ s[len-1] != ' ')
+ PyFile_SoftSpace(f, 0);
+ }
+ return 0;
+}
+
+static int __Pyx_PrintNewline(void) {
+ PyObject *f;
+
+ if (!(f = __Pyx_GetStdout()))
+ return -1;
+ if (PyFile_WriteString("\n", f) < 0)
+ return -1;
+ PyFile_SoftSpace(f, 0);
+ return 0;
+}
+"""
+
+#------------------------------------------------------------------------------------
+
+# The following function is based on do_raise() from ceval.c.
+
+raise_utility_code = \
+"""
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) {
+ Py_XINCREF(type);
+ Py_XINCREF(value);
+ Py_XINCREF(tb);
+ /* First, check the traceback argument, replacing None with NULL. */
+ if (tb == Py_None) {
+ Py_DECREF(tb);
+ tb = 0;
+ }
+ else if (tb != NULL && !PyTraceBack_Check(tb)) {
+ PyErr_SetString(PyExc_TypeError,
+ "raise: arg 3 must be a traceback or None");
+ goto raise_error;
+ }
+ /* Next, replace a missing value with None */
+ if (value == NULL) {
+ value = Py_None;
+ Py_INCREF(value);
+ }
+ /* Next, repeatedly, replace a tuple exception with its first item */
+ while (PyTuple_Check(type) && PyTuple_Size(type) > 0) {
+ PyObject *tmp = type;
+ type = PyTuple_GET_ITEM(type, 0);
+ Py_INCREF(type);
+ Py_DECREF(tmp);
+ }
+ if (PyString_Check(type))
+ ;
+ else if (PyClass_Check(type))
+ ; /*PyErr_NormalizeException(&type, &value, &tb);*/
+ else if (PyInstance_Check(type)) {
+ /* Raising an instance. The value should be a dummy. */
+ if (value != Py_None) {
+ PyErr_SetString(PyExc_TypeError,
+ "instance exception may not have a separate value");
+ goto raise_error;
+ }
+ else {
+ /* Normalize to raise <class>, <instance> */
+ Py_DECREF(value);
+ value = type;
+ type = (PyObject*) ((PyInstanceObject*)type)->in_class;
+ Py_INCREF(type);
+ }
+ }
+ else {
+ /* Not something you can raise. You get an exception
+ anyway, just not what you specified :-) */
+ PyErr_Format(PyExc_TypeError,
+ "exceptions must be strings, classes, or "
+ "instances, not %s", type->ob_type->tp_name);
+ goto raise_error;
+ }
+ PyErr_Restore(type, value, tb);
+ return;
+raise_error:
+ Py_XDECREF(value);
+ Py_XDECREF(type);
+ Py_XDECREF(tb);
+ return;
+}
+"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_ReRaise(): re-raises the exception most recently
# caught in the current thread.  It copies type/value/traceback out of
# the thread state, takes new references, and hands them to
# PyErr_Restore (which steals those references).
reraise_utility_code = \
"""
static void __Pyx_ReRaise(void) {
    PyThreadState *tstate = PyThreadState_Get();
    PyObject *type = tstate->exc_type;
    PyObject *value = tstate->exc_value;
    PyObject *tb = tstate->exc_traceback;
    Py_XINCREF(type);
    Py_XINCREF(value);
    Py_XINCREF(tb);
    PyErr_Restore(type, value, tb);
}
"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_ArgTypeTest(): verifies that 'obj' is an instance
# of 'type' (or is None, when none_allowed is set).  Returns 1 on
# success; on failure sets a TypeError naming the offending argument
# and returns 0.
arg_type_test_utility_code = \
"""
static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name) {
    if (!type) {
        PyErr_Format(PyExc_SystemError, "Missing type object");
        return 0;
    }
    if ((none_allowed && obj == Py_None) || PyObject_TypeCheck(obj, type))
        return 1;
    PyErr_Format(PyExc_TypeError,
        "Argument '%s' has incorrect type (expected %s, got %s)",
        name, type->tp_name, obj->ob_type->tp_name);
    return 0;
}
"""
+
+#------------------------------------------------------------------------------------
+#
+# __Pyx_GetStarArgs splits the args tuple and kwds dict into two parts
+# each, one part suitable for passing to PyArg_ParseTupleAndKeywords,
+# and the other containing any extra arguments. On success, replaces
+# the borrowed references *args and *kwds with references to a new
+# tuple and dict, and passes back new references in *args2 and *kwds2.
+# Does not touch any of its arguments on failure.
+#
+# Any of *kwds, args2 and kwds2 may be 0 (but not args or kwds). If
+# *kwds == 0, it is not changed. If kwds2 == 0 and *kwds != 0, a new
+# reference to the same dictionary is passed back in *kwds.
+#
+
# C source for __Pyx_GetStarArgs() -- see the comment block above for the
# calling contract.  Two bugs fixed relative to the original:
#   1. After kwds1 = PyDict_New() the NULL check tested 'kwds' (the
#      always-non-NULL parameter) instead of 'kwds1', so an allocation
#      failure went undetected.
#   2. The 'bad:' cleanup dereferenced *args2 / *kwds2 without first
#      checking that args2 / kwds2 themselves are non-NULL, crashing for
#      callers that pass 0 for those out-parameters.  Py_XDECREF already
#      tolerates a NULL *value*, so testing the pointers is sufficient.
get_starargs_utility_code = \
"""
static int __Pyx_GetStarArgs(
    PyObject **args,
    PyObject **kwds,
    char *kwd_list[],
    int nargs,
    PyObject **args2,
    PyObject **kwds2)
{
    PyObject *x = 0, *args1 = 0, *kwds1 = 0;

    if (args2)
        *args2 = 0;
    if (kwds2)
        *kwds2 = 0;

    if (args2) {
        args1 = PyTuple_GetSlice(*args, 0, nargs);
        if (!args1)
            goto bad;
        *args2 = PyTuple_GetSlice(*args, nargs, PyTuple_Size(*args));
        if (!*args2)
            goto bad;
    }
    else {
        args1 = *args;
        Py_INCREF(args1);
    }

    if (kwds2) {
        if (*kwds) {
            char **p;
            kwds1 = PyDict_New();
            if (!kwds1)
                goto bad;
            *kwds2 = PyDict_Copy(*kwds);
            if (!*kwds2)
                goto bad;
            for (p = kwd_list; *p; p++) {
                x = PyDict_GetItemString(*kwds, *p);
                if (x) {
                    if (PyDict_SetItemString(kwds1, *p, x) < 0)
                        goto bad;
                    if (PyDict_DelItemString(*kwds2, *p) < 0)
                        goto bad;
                }
            }
        }
        else {
            *kwds2 = PyDict_New();
            if (!*kwds2)
                goto bad;
        }
    }
    else {
        kwds1 = *kwds;
        Py_XINCREF(kwds1);
    }

    *args = args1;
    *kwds = kwds1;
    return 0;
bad:
    Py_XDECREF(args1);
    Py_XDECREF(kwds1);
    if (args2)
        Py_XDECREF(*args2);
    if (kwds2)
        Py_XDECREF(*kwds2);
    return -1;
}
"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_WriteUnraisable(): reports an exception that cannot
# be propagated to a caller (e.g. one raised where no Python frame can
# receive it), tagging the report with 'name'.  The pending error is
# fetched and restored around the PyString creation so that
# PyErr_WriteUnraisable still sees the original exception.
unraisable_exception_utility_code = \
"""
static void __Pyx_WriteUnraisable(char *name) {
    PyObject *old_exc, *old_val, *old_tb;
    PyObject *ctx;
    PyErr_Fetch(&old_exc, &old_val, &old_tb);
    ctx = PyString_FromString(name);
    PyErr_Restore(old_exc, old_val, old_tb);
    if (!ctx)
        ctx = Py_None;
    PyErr_WriteUnraisable(ctx);
}
"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_AddTraceback(): fabricates a minimal code object and
# frame so that a traceback entry pointing at the original .pyx source
# line can be appended for the function named 'funcname'.  The
# %(FILENAME)s / %(LINENO)s / %(GLOBALS)s placeholders are substituted
# below with C identifier names taken from the Naming module.
traceback_utility_code = \
"""
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"

static void __Pyx_AddTraceback(char *funcname) {
    PyObject *py_srcfile = 0;
    PyObject *py_funcname = 0;
    PyObject *py_globals = 0;
    PyObject *empty_tuple = 0;
    PyObject *empty_string = 0;
    PyCodeObject *py_code = 0;
    PyFrameObject *py_frame = 0;

    py_srcfile = PyString_FromString(%(FILENAME)s);
    if (!py_srcfile) goto bad;
    py_funcname = PyString_FromString(funcname);
    if (!py_funcname) goto bad;
    py_globals = PyModule_GetDict(%(GLOBALS)s);
    if (!py_globals) goto bad;
    empty_tuple = PyTuple_New(0);
    if (!empty_tuple) goto bad;
    empty_string = PyString_FromString("");
    if (!empty_string) goto bad;
    py_code = PyCode_New(
        0,            /*int argcount,*/
        0,            /*int nlocals,*/
        0,            /*int stacksize,*/
        0,            /*int flags,*/
        empty_string, /*PyObject *code,*/
        empty_tuple,  /*PyObject *consts,*/
        empty_tuple,  /*PyObject *names,*/
        empty_tuple,  /*PyObject *varnames,*/
        empty_tuple,  /*PyObject *freevars,*/
        empty_tuple,  /*PyObject *cellvars,*/
        py_srcfile,   /*PyObject *filename,*/
        py_funcname,  /*PyObject *name,*/
        %(LINENO)s,   /*int firstlineno,*/
        empty_string  /*PyObject *lnotab*/
    );
    if (!py_code) goto bad;
    py_frame = PyFrame_New(
        PyThreadState_Get(), /*PyThreadState *tstate,*/
        py_code,             /*PyCodeObject *code,*/
        py_globals,          /*PyObject *globals,*/
        0                    /*PyObject *locals*/
    );
    if (!py_frame) goto bad;
    py_frame->f_lineno = %(LINENO)s;
    PyTraceBack_Here(py_frame);
bad:
    Py_XDECREF(py_srcfile);
    Py_XDECREF(py_funcname);
    Py_XDECREF(empty_tuple);
    Py_XDECREF(empty_string);
    Py_XDECREF(py_code);
    Py_XDECREF(py_frame);
}
""" % {
    'FILENAME': Naming.filename_cname,
    'LINENO': Naming.lineno_cname,
    'GLOBALS': Naming.module_cname
}
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_ImportType(): imports 'class_name' from
# 'module_name' (via the __Pyx_Import helper defined elsewhere in this
# module's utility code), checks that the result really is a type object
# and that its tp_basicsize matches 'size', and returns it.
# Returns NULL with an exception set on any failure.
type_import_utility_code = \
"""
static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name,
    long size)
{
    PyObject *py_module_name = 0;
    PyObject *py_class_name = 0;
    PyObject *py_name_list = 0;
    PyObject *py_module = 0;
    PyObject *result = 0;

    py_module_name = PyString_FromString(module_name);
    if (!py_module_name)
        goto bad;
    py_class_name = PyString_FromString(class_name);
    if (!py_class_name)
        goto bad;
    py_name_list = PyList_New(1);
    if (!py_name_list)
        goto bad;
    Py_INCREF(py_class_name);
    if (PyList_SetItem(py_name_list, 0, py_class_name) < 0)
        goto bad;
    py_module = __Pyx_Import(py_module_name, py_name_list);
    if (!py_module)
        goto bad;
    result = PyObject_GetAttr(py_module, py_class_name);
    if (!result)
        goto bad;
    if (!PyType_Check(result)) {
        PyErr_Format(PyExc_TypeError,
            "%s.%s is not a type object",
            module_name, class_name);
        goto bad;
    }
    if (((PyTypeObject *)result)->tp_basicsize != size) {
        PyErr_Format(PyExc_ValueError,
            "%s.%s does not appear to be the correct type object",
            module_name, class_name);
        goto bad;
    }
    goto done;
bad:
    Py_XDECREF(result);
    result = 0;
done:
    Py_XDECREF(py_module_name);
    Py_XDECREF(py_class_name);
    Py_XDECREF(py_name_list);
    return (PyTypeObject *)result;
}
"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_SetVtable(): stores an extension type's C method
# table pointer in its dict under '__pyx_vtable__', wrapped in a
# PyCObject.  Returns 0 on success, -1 on failure.
set_vtable_utility_code = \
"""
static int __Pyx_SetVtable(PyObject *dict, void *vtable) {
    PyObject *pycobj = 0;
    int result;

    pycobj = PyCObject_FromVoidPtr(vtable, 0);
    if (!pycobj)
        goto bad;
    if (PyDict_SetItemString(dict, "__pyx_vtable__", pycobj) < 0)
        goto bad;
    result = 0;
    goto done;

bad:
    result = -1;
done:
    Py_XDECREF(pycobj);
    return result;
}
"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_GetVtable(): retrieves the '__pyx_vtable__'
# PyCObject stored by __Pyx_SetVtable from 'dict' and writes the wrapped
# C pointer through 'vtabptr'.  Returns 0 on success, -1 on failure.
get_vtable_utility_code = \
r"""
static int __Pyx_GetVtable(PyObject *dict, void *vtabptr) {
    int result;
    PyObject *pycobj;

    pycobj = PyMapping_GetItemString(dict, "__pyx_vtable__");
    if (!pycobj)
        goto bad;
    *(void **)vtabptr = PyCObject_AsVoidPtr(pycobj);
    if (!*(void **)vtabptr)
        goto bad;
    result = 0;
    goto done;

bad:
    result = -1;
done:
    Py_XDECREF(pycobj);
    return result;
}
"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_InternStrings(): walks a NULL-terminated table of
# {PyObject **p; char *s} entries, interning each C string and storing
# the result through the entry's pointer.  Returns 0 on success, -1 if
# any interning fails.
# (Removed the stray trailing semicolon after the string literal -- it
# is a no-op statement and not idiomatic Python.)
init_intern_tab_utility_code = \
"""
static int __Pyx_InternStrings(__Pyx_InternTabEntry *t) {
    while (t->p) {
        *t->p = PyString_InternFromString(t->s);
        if (!*t->p)
            return -1;
        ++t;
    }
    return 0;
}
"""
+
+#------------------------------------------------------------------------------------
+
# C source for __Pyx_InitStrings(): walks a NULL-terminated table of
# {PyObject **p; char *s; int n} entries, creating a string of length
# n - 1 from each (n includes the terminating NUL) and storing it
# through the entry's pointer.  Returns 0 on success, -1 on failure.
# (Removed the stray trailing semicolon after the string literal.)
init_string_tab_utility_code = \
"""
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
    while (t->p) {
        *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
        if (!*t->p)
            return -1;
        ++t;
    }
    return 0;
}
"""
+
+#------------------------------------------------------------------------------------
--- /dev/null
+#
+# Pyrex - Compilation-wide options
+#
+
# NOTE(review): presumably enables interning so generated modules can use
# shared string objects for fast name lookups -- confirm against the
# code generator's use of this flag.
intern_names = 1    #  Intern global variable and attribute names
--- /dev/null
+#
+# Pyrex Parser
+#
+
+import os, re
+from string import join, replace
+from types import ListType, TupleType
+from Scanning import PyrexScanner
+import Nodes
+import ExprNodes
+from Errors import error, InternalError
+
def p_ident(s, message = "Expected an identifier"):
    """Expect an IDENT token: return its text and advance the scanner,
    otherwise report 'message' through the scanner's error handler."""
    if s.sy == 'IDENT':
        result = s.systring
        s.next()
        return result
    else:
        s.error(message)
+
def p_ident_list(s):
    """Parse a comma-separated list of identifiers; returns the names.

    Replaces the deprecated Python 2 '<>' operator with '!='."""
    names = []
    while s.sy == 'IDENT':
        names.append(s.systring)
        s.next()
        if s.sy != ',':
            break
        s.next()
    return names
+
+#------------------------------------------
+#
+# Expressions
+#
+#------------------------------------------
+
def p_binop_expr(s, ops, p_sub_expr):
    """Parse a left-associative chain of binary operators drawn from
    'ops', with operands parsed by p_sub_expr."""
    node = p_sub_expr(s)
    while s.sy in ops:
        operator = s.sy
        pos = s.position()
        s.next()
        rhs = p_sub_expr(s)
        node = ExprNodes.binop_node(pos, operator, node, rhs)
    return node

#test: and_test ('or' and_test)* | lambdef

def p_simple_expr(s):
    """Parse a non-tuple expression ('or' level and below)."""
    return p_rassoc_binop_expr(s, ('or',), p_and_test)

def p_rassoc_binop_expr(s, ops, p_subexpr):
    """Parse a right-associative chain of operators from 'ops'
    (result is equivalent for 'and'/'or' short-circuit semantics)."""
    lhs = p_subexpr(s)
    if s.sy in ops:
        pos = s.position()
        operator = s.sy
        s.next()
        rhs = p_rassoc_binop_expr(s, ops, p_subexpr)
        lhs = ExprNodes.binop_node(pos, operator, lhs, rhs)
    return lhs

#and_test: not_test ('and' not_test)*

def p_and_test(s):
    """Parse an 'and' chain."""
    return p_rassoc_binop_expr(s, ('and',), p_not_test)

#not_test: 'not' not_test | comparison

def p_not_test(s):
    """Parse a (possibly repeated) 'not' prefix, or a comparison."""
    if s.sy == 'not':
        pos = s.position()
        s.next()
        return ExprNodes.NotNode(pos, operand = p_not_test(s))
    else:
        return p_comparison(s)
+
+#comparison: expr (comp_op expr)*
+#comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+
def p_comparison(s):
    """Parse a comparison, chaining any cascaded comparisons onto the
    primary comparison node."""
    node = p_bit_expr(s)
    if s.sy in comparison_ops:
        pos = s.position()
        operator = p_cmp_op(s)
        rhs = p_bit_expr(s)
        node = ExprNodes.PrimaryCmpNode(pos,
            operator = operator, operand1 = node, operand2 = rhs)
        if s.sy in comparison_ops:
            node.cascade = p_cascaded_cmp(s)
    return node

def p_cascaded_cmp(s):
    """Parse one link of a cascaded comparison (a < b < c ...)."""
    pos = s.position()
    operator = p_cmp_op(s)
    rhs = p_bit_expr(s)
    node = ExprNodes.CascadedCmpNode(pos,
        operator = operator, operand2 = rhs)
    if s.sy in comparison_ops:
        node.cascade = p_cascaded_cmp(s)
    return node

def p_cmp_op(s):
    """Consume a comparison operator, normalising the two-token forms
    ('not in', 'is not') and the '<>' alias for '!='."""
    if s.sy == 'not':
        s.next()
        s.expect('in')
        op = 'not_in'
    elif s.sy == 'is':
        s.next()
        if s.sy == 'not':
            s.next()
            op = 'is_not'
        else:
            op = 'is'
    else:
        op = s.sy
        s.next()
    if op == '<>':
        op = '!='
    return op

# Tokens that can begin a comparison operator.
comparison_ops = (
    '<', '>', '==', '>=', '<=', '<>', '!=',
    'in', 'is', 'not'
)
+
#expr: xor_expr ('|' xor_expr)*

def p_bit_expr(s):
    """Parse a '|' chain."""
    return p_binop_expr(s, ('|',), p_xor_expr)

#xor_expr: and_expr ('^' and_expr)*

def p_xor_expr(s):
    """Parse a '^' chain."""
    return p_binop_expr(s, ('^',), p_and_expr)

#and_expr: shift_expr ('&' shift_expr)*

def p_and_expr(s):
    """Parse a '&' chain."""
    return p_binop_expr(s, ('&',), p_shift_expr)

#shift_expr: arith_expr (('<<'|'>>') arith_expr)*

def p_shift_expr(s):
    """Parse a shift chain."""
    return p_binop_expr(s, ('<<', '>>'), p_arith_expr)

#arith_expr: term (('+'|'-') term)*

def p_arith_expr(s):
    """Parse an additive chain."""
    return p_binop_expr(s, ('+', '-'), p_term)

#term: factor (('*'|'/'|'%') factor)*

def p_term(s):
    """Parse a multiplicative chain."""
    return p_binop_expr(s, ('*', '/', '%'), p_factor)
+
+#factor: ('+'|'-'|'~'|'&'|typecast|sizeof) factor | power
+
def p_factor(s):
    """Parse a unary prefix (+ - ~ &), a <type> cast, sizeof(), or fall
    through to a power expression."""
    sy = s.sy
    if sy in ('+', '-', '~'):
        operator = sy
        pos = s.position()
        s.next()
        return ExprNodes.unop_node(pos, operator, p_factor(s))
    if sy == '&':
        # C-style address-of operator.
        pos = s.position()
        s.next()
        return ExprNodes.AmpersandNode(pos, operand = p_factor(s))
    if sy == "<":
        return p_typecast(s)
    if sy == 'IDENT' and s.systring == "sizeof":
        return p_sizeof(s)
    return p_power(s)
+
def p_typecast(s):
    """Parse a Pyrex typecast: <type> factor.  Entered with s.sy == '<'."""
    pos = s.position()
    s.next()
    type_node = p_c_base_type(s)
    decl_node = p_c_declarator(s, empty = 1)
    s.expect(">")
    return ExprNodes.TypecastNode(pos,
        base_type = type_node,
        declarator = decl_node,
        operand = p_factor(s))

def p_sizeof(s):
    """Parse sizeof(type) or sizeof(expr).  Entered at the 'sizeof'
    identifier."""
    pos = s.position()
    s.next()
    s.expect('(')
    if looking_at_type(s):
        type_node = p_c_base_type(s)
        decl_node = p_c_declarator(s, empty = 1)
        result = ExprNodes.SizeofTypeNode(pos,
            base_type = type_node, declarator = decl_node)
    else:
        result = ExprNodes.SizeofVarNode(pos, operand = p_simple_expr(s))
    s.expect(')')
    return result
+
+#power: atom trailer* ('**' factor)*
+
def p_power(s):
    """Parse an atom followed by any trailers and an optional '**'
    (the exponent itself is parsed as a factor, giving right binding)."""
    node = p_atom(s)
    while s.sy in ('(', '[', '.'):
        node = p_trailer(s, node)
    if s.sy == '**':
        pos = s.position()
        s.next()
        node = ExprNodes.binop_node(pos, '**', node, p_factor(s))
    return node

#trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME

def p_trailer(s, node1):
    """Parse one call, index, or attribute trailer applied to node1."""
    pos = s.position()
    if s.sy == '(':
        return p_call(s, node1)
    if s.sy == '[':
        return p_index(s, node1)
    # s.sy == '.'
    s.next()
    return ExprNodes.AttributeNode(pos,
        obj = node1, attribute = p_ident(s))
+
+# arglist: argument (',' argument)* [',']
+# argument: [test '='] test # Really [keyword '='] test
+
def p_call(s, function):
    """Parse a call trailer '(' [arglist] ')' applied to 'function'.

    Returns a SimpleCallNode for plain positional calls, otherwise a
    GeneralCallNode carrying the positional tuple, keyword dict, and
    ** argument.  A * argument is folded into the positional tuple via
    AsTupleNode (and '+' concatenation when both are present)."""
    # s.sy == '('
    pos = s.position()
    s.next()
    positional_args = []
    keyword_args = []
    star_arg = None
    starstar_arg = None
    while s.sy not in ('*', '**', ')'):
        arg = p_simple_expr(s)
        if s.sy == '=':
            # 'name = expr' keyword argument; the name was parsed as an
            # expression, so insist it is a bare identifier.
            s.next()
            if not arg.is_name:
                s.error("Expected an identifier before '='",
                    pos = arg.pos)
            keyword = ExprNodes.StringNode(arg.pos,
                value = arg.name)
            arg = p_simple_expr(s)
            keyword_args.append((keyword, arg))
        else:
            if keyword_args:
                s.error("Non-keyword arg following keyword arg",
                    pos = arg.pos)
            positional_args.append(arg)
        if s.sy <> ',':
            break
        s.next()
    if s.sy == '*':
        s.next()
        star_arg = p_simple_expr(s)
        if s.sy == ',':
            s.next()
    if s.sy == '**':
        s.next()
        starstar_arg = p_simple_expr(s)
        if s.sy == ',':
            s.next()
    s.expect(')')
    if not (keyword_args or star_arg or starstar_arg):
        return ExprNodes.SimpleCallNode(pos,
            function = function,
            args = positional_args)
    else:
        arg_tuple = None
        keyword_dict = None
        if positional_args or not star_arg:
            arg_tuple = ExprNodes.TupleNode(pos,
                args = positional_args)
        if star_arg:
            # Coerce the * argument to a tuple and append it to any
            # explicit positional arguments.
            star_arg_tuple = ExprNodes.AsTupleNode(pos, arg = star_arg)
            if arg_tuple:
                arg_tuple = ExprNodes.binop_node(pos,
                    operator = '+', operand1 = arg_tuple,
                    operand2 = star_arg_tuple)
            else:
                arg_tuple = star_arg_tuple
        if keyword_args:
            keyword_dict = ExprNodes.DictNode(pos,
                key_value_pairs = keyword_args)
        return ExprNodes.GeneralCallNode(pos,
            function = function,
            positional_args = arg_tuple,
            keyword_args = keyword_dict,
            starstar_arg = starstar_arg)
+
+#lambdef: 'lambda' [varargslist] ':' test
+
+#subscriptlist: subscript (',' subscript)* [',']
+
def p_index(s, base):
    """Parse a '[...]' trailer applied to 'base'.  A single two-part
    subscript becomes a SliceIndexNode; anything else an IndexNode."""
    pos = s.position()
    s.next()
    subscripts = p_subscript_list(s)
    if len(subscripts) == 1 and len(subscripts[0]) == 2:
        start, stop = subscripts[0]
        node = ExprNodes.SliceIndexNode(pos,
            base = base, start = start, stop = stop)
    else:
        indexes = make_slice_nodes(pos, subscripts)
        if len(indexes) == 1:
            index = indexes[0]
        else:
            index = ExprNodes.TupleNode(pos, args = indexes)
        node = ExprNodes.IndexNode(pos,
            base = base, index = index)
    s.expect(']')
    return node

def p_subscript_list(s):
    """Parse comma-separated subscripts, tolerating a trailing comma."""
    items = [p_subscript(s)]
    while s.sy == ',':
        s.next()
        if s.sy == ']':
            break
        items.append(p_subscript(s))
    return items
+
+#subscript: '.' '.' '.' | test | [test] ':' [test] [':' [test]]
+
#subscript: '.' '.' '.' | test | [test] ':' [test] [':' [test]]

def p_subscript(s):
    """Parse one subscript; returns a list of 1, 2 or 3 ExprNodes
    depending on how many slice components were present.

    (Replaces the deprecated '<>' operator with '!='.)"""
    pos = s.position()
    if s.sy == '.':
        expect_ellipsis(s)
        return [ExprNodes.EllipsisNode(pos)]
    start = p_slice_element(s, (':',))
    if s.sy != ':':
        return [start]
    s.next()
    stop = p_slice_element(s, (':', ',', ']'))
    if s.sy != ':':
        return [start, stop]
    s.next()
    step = p_slice_element(s, (':', ',', ']'))
    return [start, stop, step]

def p_slice_element(s, follow_set):
    """A simple expression which may be omitted iff the next token is in
    follow_set; returns None when omitted."""
    if s.sy in follow_set:
        return None
    return p_simple_expr(s)

def expect_ellipsis(s):
    """Consume the three '.' tokens of an ellipsis."""
    for _ in range(3):
        s.expect('.')
+
def make_slice_nodes(pos, subscripts):
    """Convert p_subscript_list output into a list of ExprNodes, wrapping
    subscripts with 2+ components in SliceNodes."""
    nodes = []
    for sub in subscripts:
        if len(sub) == 1:
            nodes.append(sub[0])
        else:
            nodes.append(make_slice_node(pos, *sub))
    return nodes

def make_slice_node(pos, start, stop = None, step = None):
    """Build a SliceNode, substituting None literals for any missing
    components."""
    start = start or ExprNodes.NoneNode(pos)
    stop = stop or ExprNodes.NoneNode(pos)
    step = step or ExprNodes.NoneNode(pos)
    return ExprNodes.SliceNode(pos,
        start = start, stop = stop, step = step)
+
+#atom: '(' [testlist] ')' | '[' [listmaker] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING+
+
def p_atom(s):
    """Parse an atom: parenthesised expression or (), list/dict display,
    backquote expression, numeric or string literal, NULL, None, or a
    name.  Reports an error for anything else."""
    pos = s.position()
    sy = s.sy
    if sy == '(':
        s.next()
        if s.sy == ')':
            result = ExprNodes.TupleNode(pos, args = [])
        else:
            result = p_expr(s)
        s.expect(')')
        return result
    elif sy == '[':
        return p_list_maker(s)
    elif sy == '{':
        return p_dict_maker(s)
    elif sy == '`':
        return p_backquote_expr(s)
    elif sy == 'INT':
        # Decode the radix here: 0x... hex, leading 0 octal, else decimal.
        digits = s.systring
        if digits[:2] == "0x":
            value = long(digits[2:], 16)
        elif digits[:1] == "0":
            value = int(digits, 8)
        else:
            value = int(s.systring)
        s.next()
        return ExprNodes.IntNode(pos, value = value)
    elif sy == 'LONG':
        value = s.systring
        s.next()
        return ExprNodes.LongNode(pos, value = value)
    elif sy == 'FLOAT':
        value = s.systring
        s.next()
        return ExprNodes.FloatNode(pos, value = value)
    elif sy == 'IMAG':
        # Strip the trailing 'j' from the token text.
        value = s.systring[:-1]
        s.next()
        return ExprNodes.ImagNode(pos, value = value)
    elif sy == 'STRING' or sy == 'BEGIN_STRING':
        kind, value = p_cat_string_literal(s)
        if kind == 'c':
            return ExprNodes.CharNode(pos, value = value)
        else:
            return ExprNodes.StringNode(pos, value = value)
    elif sy == 'IDENT':
        name = s.systring
        s.next()
        if name == "None":
            return ExprNodes.NoneNode(pos)
        else:
            return ExprNodes.NameNode(pos, name=name)
    elif sy == 'NULL':
        s.next()
        return ExprNodes.NullNode(pos)
    else:
        s.error("Expected an identifier or literal")
+
def p_cat_string_literal(s):
    """Parse one or more adjacent string literals, concatenating them.

    Returns (kind, value) where kind in ('', 'c', 'r').

    Fixes: the concatenation error was reported via 'self.error' --
    there is no 'self' in this module-level function (a NameError at
    runtime); it now goes through the scanner like every other error.
    Also replaces the deprecated '<>' operator."""
    kind, value = p_string_literal(s)
    if kind != 'c':
        strings = [value]
        while s.sy == 'STRING' or s.sy == 'BEGIN_STRING':
            next_kind, next_value = p_string_literal(s)
            if next_kind == 'c':
                s.error(
                    "Cannot concatenate char literal with another string or char literal")
            strings.append(next_value)
        value = ''.join(strings)
    return kind, value
+
def p_opt_string_literal(s):
    """Parse a string literal if one is present, else return None."""
    if s.sy not in ('STRING', 'BEGIN_STRING'):
        return None
    return p_string_literal(s)
+
def p_string_literal(s):
    # A single string or char literal.
    # Returns (kind, value) where kind in ('', 'c', 'r')
    if s.sy == 'STRING':
        value = unquote(s.systring)
        s.next()
        # NOTE(review): this branch returns a bare value, not the
        # (kind, value) pair documented above and unpacked by callers
        # such as p_cat_string_literal and p_include_statement --
        # confirm whether a STRING token can still reach this point.
        return value
    # s.sy == 'BEGIN_STRING'
    pos = s.position()
    # The prefix character (if any) selects the literal kind:
    # 'c' = char literal, 'r' = raw string, '' = ordinary string.
    kind = s.systring[:1].lower()
    if kind not in "cr":
        kind = ''
    chars = []
    while 1:
        s.next()
        sy = s.sy
        if sy == 'CHARS':
            systr = s.systring
            # Quotes and bare newlines must be escaped for the C literal.
            if len(systr) == 1 and systr in "'\"\n":
                chars.append('\\')
            chars.append(systr)
        elif sy == 'ESCAPE':
            systr = s.systring
            if kind == 'r':
                # Raw string: keep escapes literally, doubling the
                # backslash (and escaping quotes) for the C literal.
                if systr == '\\\n':
                    chars.append(r'\\\n')
                elif systr == r'\"':
                    chars.append(r'\\\"')
                elif systr == r'\\':
                    chars.append(r'\\\\')
                else:
                    chars.append('\\' + systr)
            else:
                c = systr[1]
                if c in "'\"\\abfnrtv01234567":
                    # Escape valid in C as well -- pass through.
                    chars.append(systr)
                elif c == 'x':
                    # Pad hex escapes with a leading 0 so a following
                    # hex-looking character cannot extend them in C.
                    chars.append('\\x0' + systr[2:])
                elif c == '\n':
                    # Backslash-newline: line continuation, dropped.
                    pass
                else:
                    # Unknown escape: the character stands for itself.
                    chars.append(systr[1:])
        elif sy == 'NEWLINE':
            chars.append(r'\n')
        elif sy == 'END_STRING':
            break
        elif sy == 'EOF':
            s.error("Unclosed string literal", pos = pos)
        else:
            s.error(
                "Unexpected token %r:%r in string literal" %
                (sy, s.systring))
    s.next()
    value = join(chars, '')
    return kind, value
+
def unquote(s):
    """Convert a Pyrex string token (quotes included, optional 'r'
    prefix) into the body of an equivalent C string literal.

    Fixes: removed a leftover debug 'print' statement that dumped the
    intermediate split list to stdout on every call."""
    is_raw = 0
    if s[:1].lower() == "r":
        is_raw = 1
        s = s[1:]
    # Strip triple or single quoting.
    q = s[:3]
    if q == '"""' or q == "'''":
        s = s[3:-3]
    else:
        s = s[1:-1]
    if is_raw:
        s = s.replace('\\', '\\\\')
        s = s.replace('\n', '\\\n')
    else:
        # Split into double quotes, newlines, escape sequences
        # and spans of regular chars.
        parts = re.split(r'((?:\\[0-7]{1,3})|(?:\\x[0-9A-Fa-f]{2})|(?:\\.)|(?:\\\n)|(?:\n)|")', s)
        out = []
        for item in parts:
            if item == '"' or item == '\n':
                # Must be escaped in the C literal.
                out.append('\\' + item)
            elif item == '\\\n':
                # Backslash-newline continuation: dropped.
                pass
            elif item[:1] == '\\':
                if len(item) == 2:
                    if item[1] in '"\\abfnrtv':
                        out.append(item)
                    else:
                        # Unknown escape: the char stands for itself.
                        out.append(item[1])
                elif item[1:2] == 'x':
                    # Pad with a leading 0 so following hex-looking
                    # characters cannot extend the escape in C
                    # (\xFF -> \x0FF, same value).
                    out.append('\\x0' + item[2:])
                else:
                    # Octal escape: valid in C as-is.
                    out.append(item)
            else:
                out.append(item)
        s = "".join(out)
    return s
+
def p_list_maker(s):
    """Parse a list display.  Entered with s.sy == '['."""
    pos = s.position()
    s.next()
    exprs = p_simple_expr_list(s)
    s.expect(']')
    return ExprNodes.ListNode(pos, args = exprs)

#dictmaker: test ':' test (',' test ':' test)* [',']

def p_dict_maker(s):
    """Parse a dict display, tolerating a trailing comma.  Entered with
    s.sy == '{'.  (Replaces the deprecated '<>' operator.)"""
    pos = s.position()
    s.next()
    items = []
    while s.sy != '}':
        key = p_simple_expr(s)
        s.expect(':')
        value = p_simple_expr(s)
        items.append((key, value))
        if s.sy != ',':
            break
        s.next()
    s.expect('}')
    return ExprNodes.DictNode(pos, key_value_pairs = items)

def p_backquote_expr(s):
    """Parse a backquote (repr) expression.  Entered with s.sy == '`'."""
    pos = s.position()
    s.next()
    arg = p_expr(s)
    s.expect('`')
    return ExprNodes.BackquoteNode(pos, arg = arg)
+
+#testlist: test (',' test)* [',']
+
def p_simple_expr_list(s):
    """Parse a comma-separated expression list, stopping at any
    terminator token.  (Replaces the deprecated '<>' operator.)"""
    exprs = []
    while s.sy not in expr_terminators:
        exprs.append(p_simple_expr(s))
        if s.sy != ',':
            break
        s.next()
    return exprs

def p_expr(s):
    """Parse a full expression; a following comma makes it a tuple."""
    pos = s.position()
    expr = p_simple_expr(s)
    if s.sy == ',':
        s.next()
        exprs = [expr] + p_simple_expr_list(s)
        return ExprNodes.TupleNode(pos, args = exprs)
    else:
        return expr

# Tokens that end an expression list.
expr_terminators = (')', ']', '}', ':', '=', 'NEWLINE')
+
+#-------------------------------------------------------
+#
+# Statements
+#
+#-------------------------------------------------------
+
def p_global_statement(s):
    """Parse a 'global' statement.  Assumes s.sy == 'global'."""
    pos = s.position()
    s.next()
    return Nodes.GlobalNode(pos, names = p_ident_list(s))
+
def p_expression_or_assignment(s):
    """Parse either a bare expression statement or a (possibly cascaded)
    assignment.  Parallel (tuple-to-tuple) assignments are flattened
    into element-wise assignments where the sizes allow it."""
    # Collect LHS1 = LHS2 = ... = RHS as a flat list of expressions.
    expr_list = [p_expr(s)]
    while s.sy == '=':
        s.next()
        expr_list.append(p_expr(s))
    if len(expr_list) == 1:
        # No '=' seen: plain expression statement.
        expr = expr_list[0]
        return Nodes.ExprStatNode(expr.pos, expr = expr)
    else:
        # Recursively split matching sequence constructors into
        # element-wise assignment groups.
        expr_list_list = []
        flatten_parallel_assignments(expr_list, expr_list_list)
        nodes = []
        for expr_list in expr_list_list:
            lhs_list = expr_list[:-1]
            rhs = expr_list[-1]
            if len(lhs_list) == 1:
                node = Nodes.SingleAssignmentNode(rhs.pos,
                    lhs = lhs_list[0], rhs = rhs)
            else:
                node = Nodes.CascadedAssignmentNode(rhs.pos,
                    lhs_list = lhs_list, rhs = rhs)
            nodes.append(node)
        if len(nodes) == 1:
            return nodes[0]
        else:
            return Nodes.ParallelAssignmentNode(nodes[0].pos, stats = nodes)
+
def flatten_parallel_assignments(input, output):
    """Recursively rearrange one (possibly cascaded) assignment.

    'input' is a list of expression nodes (the LHSs followed by the
    RHS).  If all of them are sequence constructors of equal length,
    the assignment is split into element-wise assignments, recursively;
    otherwise the list is appended to 'output' unchanged."""
    size = find_parallel_assignment_size(input)
    if size < 0:
        output.append(input)
        return
    for i in range(size):
        flatten_parallel_assignments([expr.args[i] for expr in input], output)

def find_parallel_assignment_size(input):
    """Return the common length if every node in 'input' is a sequence
    constructor of the same size, else -1.  A size mismatch among
    sequence constructors is reported as an error.

    (Replaces the deprecated '<>' operator with '!='.)"""
    for expr in input:
        if not expr.is_sequence_constructor:
            return -1
    rhs_size = len(input[-1].args)
    for lhs in input[:-1]:
        lhs_size = len(lhs.args)
        if lhs_size != rhs_size:
            error(lhs.pos, "Unpacking sequence of wrong size (expected %d, got %d)"
                % (lhs_size, rhs_size))
            return -1
    return rhs_size
+
def p_print_statement(s):
    """Parse a print statement.  Entered with s.sy == 'print'.
    'print >>' redirection is reported as unimplemented."""
    pos = s.position()
    s.next()
    if s.sy == '>>':
        s.error("'print >>' not yet implemented")
    args = []
    ends_with_comma = 0
    if s.sy not in ('NEWLINE', 'EOF'):
        args.append(p_simple_expr(s))
        while s.sy == ',':
            s.next()
            if s.sy in ('NEWLINE', 'EOF'):
                ends_with_comma = 1
                break
            args.append(p_simple_expr(s))
    return Nodes.PrintStatNode(pos,
        args = args, ends_with_comma = ends_with_comma)

def p_del_statement(s):
    """Parse a del statement.  Entered with s.sy == 'del'."""
    pos = s.position()
    s.next()
    return Nodes.DelStatNode(pos, args = p_simple_expr_list(s))
+
def p_pass_statement(s, with_newline = 0):
    """Parse 'pass', optionally requiring a trailing newline."""
    pos = s.position()
    s.expect('pass')
    if with_newline:
        s.expect_newline("Expected a newline")
    return Nodes.PassStatNode(pos)

def p_break_statement(s):
    """Parse 'break'."""
    pos = s.position()
    s.next()
    return Nodes.BreakStatNode(pos)

def p_continue_statement(s):
    """Parse 'continue'."""
    pos = s.position()
    s.next()
    return Nodes.ContinueStatNode(pos)

def p_return_statement(s):
    """Parse 'return' with an optional value expression."""
    pos = s.position()
    s.next()
    if s.sy in statement_terminators:
        value = None
    else:
        value = p_expr(s)
    return Nodes.ReturnStatNode(pos, value = value)
+
def p_raise_statement(s):
    """Parse 'raise [type [, value [, traceback]]]'; any omitted part
    is passed to the node as None."""
    pos = s.position()
    s.next()
    exc_type = exc_value = exc_tb = None
    if s.sy not in statement_terminators:
        exc_type = p_simple_expr(s)
        if s.sy == ',':
            s.next()
            exc_value = p_simple_expr(s)
            if s.sy == ',':
                s.next()
                exc_tb = p_simple_expr(s)
    return Nodes.RaiseStatNode(pos,
        exc_type = exc_type,
        exc_value = exc_value,
        exc_tb = exc_tb)
+
def p_import_statement(s):
    """Parse 'import ...' or 'cimport ...' with one or more dotted
    names, each optionally aliased with 'as'.

    A cimport becomes a CImportStatNode; a Python import is desugared
    into an assignment of an ImportNode to the target name."""
    # s.sy in ('import', 'cimport')
    pos = s.position()
    kind = s.sy
    s.next()
    items = [p_dotted_name(s, as_allowed = 1)]
    while s.sy == ',':
        s.next()
        items.append(p_dotted_name(s, as_allowed = 1))
    stats = []
    for pos, target_name, dotted_name, as_name in items:
        if kind == 'cimport':
            stat = Nodes.CImportStatNode(pos,
                module_name = dotted_name,
                as_name = as_name)
        else:
            # 'import a.b as c' => c = __import__("a.b")
            stat = Nodes.SingleAssignmentNode(pos,
                lhs = ExprNodes.NameNode(pos,
                    name = as_name or target_name),
                rhs = ExprNodes.ImportNode(pos,
                    module_name = ExprNodes.StringNode(pos,
                        value = dotted_name),
                    name_list = None))
        stats.append(stat)
    return Nodes.StatListNode(pos, stats = stats)
+
def p_from_import_statement(s):
    """Parse 'from <dotted_name> import|cimport name [as alias], ...'.

    A cimport registers each local name as a type name with the scanner
    and yields a FromCImportStatNode; a Python import yields a
    FromImportStatNode wrapping an ImportNode with the name list."""
    # s.sy == 'from'
    pos = s.position()
    s.next()
    (dotted_name_pos, _, dotted_name, _) = \
        p_dotted_name(s, as_allowed = 0)
    if s.sy in ('import', 'cimport'):
        kind = s.sy
        s.next()
    else:
        s.error("Expected 'import' or 'cimport'")
    if s.sy == '*':
        s.error("'import *' not supported")
    imported_names = [p_imported_name(s)]
    while s.sy == ',':
        s.next()
        imported_names.append(p_imported_name(s))
    if kind == 'cimport':
        # Cimported names may denote extension types, so the scanner
        # must learn them for subsequent parsing.
        for (name_pos, name, as_name) in imported_names:
            local_name = as_name or name
            s.add_type_name(local_name)
        return Nodes.FromCImportStatNode(pos,
            module_name = dotted_name,
            imported_names = imported_names)
    else:
        imported_name_strings = []
        items = []
        for (name_pos, name, as_name) in imported_names:
            imported_name_strings.append(
                ExprNodes.StringNode(name_pos, value = name))
            items.append(
                (name,
                 ExprNodes.NameNode(name_pos,
                     name = as_name or name)))
        import_list = ExprNodes.ListNode(
            imported_names[0][0], args = imported_name_strings)
        return Nodes.FromImportStatNode(pos,
            module = ExprNodes.ImportNode(dotted_name_pos,
                module_name = ExprNodes.StringNode(dotted_name_pos,
                    value = dotted_name),
                name_list = import_list),
            items = items)
+
def p_imported_name(s):
    """Parse NAME ['as' NAME]; returns (pos, name, as_name)."""
    pos = s.position()
    name = p_ident(s)
    return (pos, name, p_as_name(s))

def p_dotted_name(s, as_allowed):
    """Parse a dotted module path with an optional 'as' alias.
    Returns (pos, first_component, full_dotted_name, as_name)."""
    pos = s.position()
    target_name = p_ident(s)
    names = [target_name]
    while s.sy == '.':
        s.next()
        names.append(p_ident(s))
    if as_allowed:
        as_name = p_as_name(s)
    else:
        as_name = None
    return (pos, target_name, ".".join(names), as_name)

def p_as_name(s):
    """Parse an optional 'as NAME'; returns the alias or None."""
    if s.sy == 'IDENT' and s.systring == 'as':
        s.next()
        return p_ident(s)
    return None
+
def p_assert_statement(s):
    """Parse 'assert cond [, value]'.  Entered with s.sy == 'assert'."""
    pos = s.position()
    s.next()
    cond = p_simple_expr(s)
    value = None
    if s.sy == ',':
        s.next()
        value = p_simple_expr(s)
    return Nodes.AssertStatNode(pos, cond = cond, value = value)

# Tokens that end a simple statement.
statement_terminators = (';', 'NEWLINE', 'EOF')
+
def p_if_statement(s):
    """Parse if/elif.../[else].  Entered with s.sy == 'if'."""
    pos = s.position()
    s.next()
    clauses = [p_if_clause(s)]
    while s.sy == 'elif':
        s.next()
        clauses.append(p_if_clause(s))
    return Nodes.IfStatNode(pos,
        if_clauses = clauses, else_clause = p_else_clause(s))

def p_if_clause(s):
    """Parse one condition plus its suite."""
    pos = s.position()
    condition = p_simple_expr(s)
    return Nodes.IfClauseNode(pos,
        condition = condition, body = p_suite(s))

def p_else_clause(s):
    """Parse an optional 'else' suite; returns None when absent."""
    if s.sy != 'else':
        return None
    s.next()
    return p_suite(s)

def p_while_statement(s):
    """Parse while with an optional else clause."""
    pos = s.position()
    s.next()
    condition = p_simple_expr(s)
    body = p_suite(s)
    return Nodes.WhileStatNode(pos,
        condition = condition, body = body,
        else_clause = p_else_clause(s))
+
def p_for_statement(s):
    """Parse either form of for statement:

        for <target> in <iterable>: ...      -> ForInStatNode
        for <name> from b1 <rel> name <rel> b2: ...  -> ForFromStatNode

    In the for-from form the target must be a plain name, the name
    between the relations must match it, and both relations must point
    the same way."""
    # s.sy == 'for'
    pos = s.position()
    s.next()
    target = p_for_target(s)
    if s.sy == 'in':
        s.next()
        iterator = p_for_iterator(s)
        body = p_suite(s)
        else_clause = p_else_clause(s)
        return Nodes.ForInStatNode(pos,
            target = target,
            iterator = iterator,
            body = body,
            else_clause = else_clause)
    elif s.sy == 'from':
        s.next()
        bound1 = p_bit_expr(s)
        rel1 = p_for_from_relation(s)
        name2_pos = s.position()
        name2 = p_ident(s)
        rel2_pos = s.position()
        rel2 = p_for_from_relation(s)
        bound2 = p_bit_expr(s)
        # Validate the for-from shape before building the node.
        if not target.is_name:
            error(target.pos,
                "Target of for-from statement must be a variable name")
        elif name2 <> target.name:
            error(name2_pos,
                "Variable name in for-from range does not match target")
        if rel1[0] <> rel2[0]:
            error(rel2_pos,
                "Relation directions in for-from do not match")
        body = p_suite(s)
        else_clause = p_else_clause(s)
        return Nodes.ForFromStatNode(pos,
            target = target,
            bound1 = bound1,
            relation1 = rel1,
            relation2 = rel2,
            bound2 = bound2,
            body = body,
            else_clause = else_clause)
+
def p_for_from_relation(s):
    """Consume and return one of the inequality relations used in a
    for-from range, or report an error.

    Fixes: the error message was missing a comma between '>' and '>='.
    Also replaces the deprecated '<>' operator in p_for_target."""
    if s.sy in inequality_relations:
        op = s.sy
        s.next()
        return op
    else:
        s.error("Expected one of '<', '<=', '>', '>='")

# Relations permitted on either side of a for-from range.
inequality_relations = ('<', '<=', '>', '>=')

def p_for_target(s):
    """Parse the for-loop target: a single expression, or a tuple of
    expressions terminated by 'in'."""
    pos = s.position()
    expr = p_bit_expr(s)
    if s.sy != ',':
        return expr
    s.next()
    exprs = [expr]
    while s.sy != 'in':
        exprs.append(p_bit_expr(s))
        if s.sy != ',':
            break
        s.next()
    return ExprNodes.TupleNode(pos, args = exprs)

def p_for_iterator(s):
    """Parse the iterable of a for-in loop, wrapped in an IteratorNode."""
    pos = s.position()
    expr = p_expr(s)
    return ExprNodes.IteratorNode(pos, sequence = expr)
+
def p_try_statement(s):
    """Parse try/except.../[else] or try/finally.  Entered with
    s.sy == 'try'."""
    pos = s.position()
    s.next()
    body = p_suite(s)
    if s.sy in ('except', 'else'):
        except_clauses = []
        while s.sy == 'except':
            except_clauses.append(p_except_clause(s))
        else_clause = None
        if s.sy == 'else':
            s.next()
            else_clause = p_suite(s)
        return Nodes.TryExceptStatNode(pos,
            body = body, except_clauses = except_clauses,
            else_clause = else_clause)
    if s.sy == 'finally':
        s.next()
        return Nodes.TryFinallyStatNode(pos,
            body = body, finally_clause = p_suite(s))
    s.error("Expected 'except' or 'finally'")
+
def p_except_clause(s):
    # s.sy == 'except'
    # Parse one 'except [type [, value]]:' clause of a try statement.
    pos = s.position()
    s.next()
    exc_type = None
    exc_value = None
    # A bare 'except:' has neither pattern nor target.
    if s.sy <> ':':
        exc_type = p_simple_expr(s)
        if s.sy == ',':
            s.next()
            exc_value = p_simple_expr(s)
    body = p_suite(s)
    return Nodes.ExceptClauseNode(pos,
        pattern = exc_type, target = exc_value, body = body)
+
def p_include_statement(s, level):
    # Parse an 'include "file"' statement: locate the named file,
    # scan it with a nested PyrexScanner and splice its statement
    # list into the including module. Returns None if the file
    # cannot be found (find_include_file is assumed to report the
    # error itself -- TODO confirm).
    pos = s.position()
    s.next() # 'include'
    _, include_file_name = p_string_literal(s)
    s.expect_newline("Syntax error in include statement")
    include_file_path = s.context.find_include_file(include_file_name, pos)
    if include_file_path:
        f = open(include_file_path, "rU")
        # The including scanner is passed as parent so errors report
        # the chain of inclusion.
        s2 = PyrexScanner(f, include_file_path, s)
        try:
            tree = p_statement_list(s2, level)
        finally:
            f.close()
        return tree
    else:
        return None
+
def p_simple_statement(s):
    #print "p_simple_statement:", s.sy, s.systring ###
    # Parse a single simple (one-line) statement, dispatching on the
    # current token; anything unrecognised is treated as an
    # expression or assignment.
    sy = s.sy
    if sy == 'global':
        return p_global_statement(s)
    elif sy == 'print':
        return p_print_statement(s)
    elif sy == 'del':
        return p_del_statement(s)
    elif sy == 'break':
        return p_break_statement(s)
    elif sy == 'continue':
        return p_continue_statement(s)
    elif sy == 'return':
        return p_return_statement(s)
    elif sy == 'raise':
        return p_raise_statement(s)
    elif sy == 'import' or sy == 'cimport':
        return p_import_statement(s)
    elif sy == 'from':
        return p_from_import_statement(s)
    elif sy == 'assert':
        return p_assert_statement(s)
    elif sy == 'pass':
        return p_pass_statement(s)
    else:
        return p_expression_or_assignment(s)
+
def p_simple_statement_list(s):
    # Parse a series of simple statements on one line
    # separated by semicolons. A single statement is returned as-is;
    # multiple statements are wrapped in a StatListNode. Consumes the
    # terminating newline.
    stat = p_simple_statement(s)
    if s.sy == ';':
        stats = [stat]
        while s.sy == ';':
            #print "p_simple_statement_list: maybe more to follow" ###
            s.next()
            # A trailing semicolon before the newline is legal.
            if s.sy in ('NEWLINE', 'EOF'):
                break
            stats.append(p_simple_statement(s))
        stat = Nodes.StatListNode(stats[0].pos, stats = stats)
    s.expect_newline("Syntax error in simple statement list")
    return stat
+
def p_statement(s, level, cdef_flag = 0, visibility = 'private'):
    # Parse one statement, enforcing which statement kinds are legal
    # at the given nesting level ('module', 'module_pxd', 'function',
    # 'class', 'c_class', 'c_class_pxd', 'property', 'other').
    # cdef_flag is set when the statement is already inside a cdef
    # context (e.g. a 'cdef extern' block).
    #print "p_statement:", s.sy, s.systring ###
    if s.sy == 'ctypedef':
        if level not in ('module', 'module_pxd'):
            s.error("ctypedef statement not allowed here")
        return p_ctypedef_statement(s, level, visibility)
    if s.sy == 'cdef':
        cdef_flag = 1
        s.next()
    if cdef_flag:
        if level not in ('module', 'module_pxd', 'function', 'c_class', 'c_class_pxd'):
            s.error('cdef statement not allowed here')
        return p_cdef_statement(s, level, visibility)
    elif s.sy == 'def':
        if level not in ('module', 'class', 'c_class', 'property'):
            s.error('def statement not allowed here')
        return p_def_statement(s)
    elif s.sy == 'class':
        if level <> 'module':
            s.error("class definition not allowed here")
        return p_class_statement(s)
    elif s.sy == 'include':
        if level not in ('module', 'module_pxd'):
            s.error("include statement not allowed here")
        return p_include_statement(s, level)
    elif level == 'c_class' and s.sy == 'IDENT' and s.systring == 'property':
        # 'property' is not a keyword, so it is recognised by name.
        return p_property_decl(s)
    else:
        # Extension type bodies may contain only declarations and
        # 'pass'; no executable statements.
        if level in ('c_class', 'c_class_pxd'):
            if s.sy == 'pass':
                return p_pass_statement(s, with_newline = 1)
            else:
                s.error("Executable statement not allowed here")
        if s.sy == 'if':
            return p_if_statement(s)
        elif s.sy == 'while':
            return p_while_statement(s)
        elif s.sy == 'for':
            return p_for_statement(s)
        elif s.sy == 'try':
            return p_try_statement(s)
        else:
            return p_simple_statement_list(s)
+
def p_statement_list(s, level,
        cdef_flag = 0, visibility = 'private'):
    # Parse a newline-separated series of statements, stopping at a
    # dedent or end of file, and wrap them in a StatListNode.
    #print "p_statement_list:", s.sy, s.systring ###
    list_pos = s.position()
    collected = []
    while 1:
        if s.sy == 'DEDENT' or s.sy == 'EOF':
            break
        collected.append(p_statement(s, level,
            cdef_flag = cdef_flag, visibility = visibility))
    return Nodes.StatListNode(list_pos, stats = collected)
+
def p_suite(s, level = 'other', cdef_flag = 0,
        visibility = 'private', with_doc = 0):
    # Parse the suite following a ':' -- either an indented block or
    # statements on the same line. If with_doc is true, also extracts
    # a leading doc string and returns (doc, body); otherwise returns
    # just the body node.
    pos = s.position()
    s.expect(':')
    doc = None
    stmts = []
    if s.sy == 'NEWLINE':
        # Indented block form.
        s.next()
        s.expect_indent()
        if with_doc:
            doc = p_doc_string(s)
        body = p_statement_list(s,
            level = level,
            cdef_flag = cdef_flag,
            visibility = visibility)
        s.expect_dedent()
    else:
        # Same-line form: only simple statements at executable levels,
        # only 'pass' at declaration levels.
        if level in ('module', 'class', 'function', 'other'):
            body = p_simple_statement_list(s)
        else:
            body = p_pass_statement(s)
            s.expect_newline("Syntax error in declarations")
    if with_doc:
        return doc, body
    else:
        return body
+
def p_c_base_type(s, self_flag = 0):
    # Parse the base type of a C declaration. A parenthesised type is
    # a "complex" base type; anything else is simple.
    # If self_flag is true, this is the base type for the
    # self argument of a C method of an extension type.
    if s.sy != '(':
        return p_c_simple_base_type(s, self_flag)
    return p_c_complex_base_type(s)
+
def p_c_complex_base_type(s):
    # s.sy == '('
    # Parse a parenthesised base type: '(' base-type declarator ')',
    # where the declarator must be anonymous.
    type_pos = s.position()
    s.next()
    inner_type = p_c_base_type(s)
    inner_decl = p_c_declarator(s, empty = 1)
    s.expect(')')
    return Nodes.CComplexBaseTypeNode(type_pos,
        base_type = inner_type,
        declarator = inner_decl)
+
def p_c_simple_base_type(s, self_flag):
    # Parse a simple C base type: either a builtin C type (with
    # optional sign/longness modifiers) or a possibly dotted
    # Pyrex type name. If neither is present, the name is None
    # (meaning the type defaults elsewhere).
    #print "p_c_simple_base_type: self_flag =", self_flag
    is_basic = 0
    signed = 1
    longness = 0
    pos = s.position()
    module_path = []
    if looking_at_base_type(s):
        #print "p_c_simple_base_type: looking_at_base_type at", s.position()
        is_basic = 1
        #signed = p_signed_or_unsigned(s)
        #longness = p_short_or_long(s)
        signed, longness = p_sign_and_longness(s)
        if s.sy == 'IDENT' and s.systring in basic_c_type_names:
            name = s.systring
            s.next()
        else:
            # Modifiers alone ("unsigned", "long", ...) imply int.
            name = 'int'
    elif s.looking_at_type_name() or looking_at_dotted_name(s):
        #print "p_c_simple_base_type: looking_at_type_name at", s.position()
        name = s.systring
        s.next()
        # Collect a dotted path; the last component is the type name.
        while s.sy == '.':
            module_path.append(name)
            s.next()
            name = p_ident(s)
    else:
        #print "p_c_simple_base_type: not looking at type at", s.position()
        name = None
    return Nodes.CSimpleBaseTypeNode(pos,
        name = name, module_path = module_path,
        is_basic_c_type = is_basic, signed = signed,
        longness = longness, is_self_arg = self_flag)
+
def looking_at_type(s):
    # True if the current token could begin a C type.
    base = looking_at_base_type(s)
    if base:
        return base
    return s.looking_at_type_name()
+
def looking_at_base_type(s):
    # True if the current token is one of the C base-type keywords.
    #print "looking_at_base_type?", s.sy, s.systring, s.position()
    if s.sy != 'IDENT':
        return False
    return s.systring in base_type_start_words
+
def looking_at_dotted_name(s):
    # Lookahead: is the current identifier followed by a '.'?
    # Peeks one token ahead and restores the scanner state with
    # put_back, so the caller sees an unchanged token stream.
    if s.sy == 'IDENT':
        name = s.systring
        s.next()
        result = s.sy == '.'
        s.put_back('IDENT', name)
        return result
    else:
        return 0
+
#base_type_start_words = (
#    "char", "short", "int", "long", "float", "double",
#    "void", "signed", "unsigned"
#)

# Names of the fundamental C types recognised by the parser.
basic_c_type_names = ("void", "char", "int", "float", "double")

# Modifier keywords that may precede (or stand in for) a basic type.
sign_and_longness_words = ("short", "long", "signed", "unsigned")

# Any of these identifiers can begin a C base type.
base_type_start_words = basic_c_type_names + sign_and_longness_words
+
def p_sign_and_longness(s):
    # Consume a run of 'short'/'long'/'signed'/'unsigned' modifiers.
    # Returns (signed, longness) where signed is 1/0 and longness is
    # -1 for short, or the number of 'long' keywords seen.
    signed, longness = 1, 0
    while s.sy == 'IDENT' and s.systring in sign_and_longness_words:
        word = s.systring
        if word == 'unsigned':
            signed = 0
        elif word == 'short':
            longness = -1
        elif word == 'long':
            longness = longness + 1
        # 'signed' is the default and needs no state change.
        s.next()
    return signed, longness
+
+#def p_signed_or_unsigned(s):
+# signed = 1
+# if s.sy == 'IDENT':
+# if s.systring == 'signed':
+# s.next()
+# elif s.systring == 'unsigned':
+# signed = 0
+# s.next()
+# return signed
+#
+#def p_short_or_long(s):
+# longness = 0
+# if s.sy == 'IDENT' and s.systring == 'short':
+# longness = -1
+# s.next()
+# else:
+# while s.sy == 'IDENT' and s.systring == 'long':
+# longness += 1
+# s.next()
+# return longness
+
def p_opt_cname(s):
    # Parse an optional C-name string literal; returns the C name or
    # None if no literal is present.
    literal = p_opt_string_literal(s)
    if not literal:
        return None
    _, cname = literal
    return cname
+
def p_c_declarator(s, empty = 0, is_type = 0, cmethod_flag = 0):
    # Parse a C declarator: pointers recurse on this function; the
    # base case is an (optionally parenthesised) name followed by any
    # number of array '[...]' or function '(...)' suffixes.
    # If empty is true, the declarator must be
    # empty, otherwise we don't care.
    # If cmethod_flag is true, then if this declarator declares
    # a function, it's a C method of an extension type.
    # If is_type is true, the declared name is registered as a type
    # name with the scanner (for ctypedef).
    pos = s.position()
    if s.sy == '*':
        s.next()
        base = p_c_declarator(s, empty, is_type, cmethod_flag)
        result = Nodes.CPtrDeclaratorNode(pos,
            base = base)
    elif s.sy == '**': # scanner returns this as a single token
        s.next()
        base = p_c_declarator(s, empty, is_type, cmethod_flag)
        # Expand '**' into two nested pointer declarators.
        result = Nodes.CPtrDeclaratorNode(pos,
            base = Nodes.CPtrDeclaratorNode(pos,
                base = base))
    else:
        if s.sy == '(':
            s.next()
            result = p_c_declarator(s, empty, is_type, cmethod_flag)
            s.expect(')')
        else:
            if s.sy == 'IDENT':
                name = s.systring
                if is_type:
                    s.add_type_name(name)
                if empty:
                    error(s.position(), "Declarator should be empty")
                s.next()
                cname = p_opt_cname(s)
            else:
                # Anonymous declarator (e.g. in a cast or arg type).
                name = ""
                cname = None
            result = Nodes.CNameDeclaratorNode(pos,
                name = name, cname = cname)
        while s.sy in ('[', '('):
            if s.sy == '[':
                s.next()
                if s.sy <> ']':
                    dim = p_expr(s)
                else:
                    dim = None
                s.expect(']')
                result = Nodes.CArrayDeclaratorNode(pos,
                    base = result, dimension = dim)
            else: # sy == '('
                s.next()
                args = p_c_arg_list(s, in_pyfunc = 0, cmethod_flag = cmethod_flag)
                ellipsis = p_optional_ellipsis(s)
                s.expect(')')
                exc_val, exc_check = p_exception_value_clause(s)
                result = Nodes.CFuncDeclaratorNode(pos,
                    base = result, args = args, has_varargs = ellipsis,
                    exception_value = exc_val, exception_check = exc_check)
            # Only the outermost argument list can take the implicit
            # self argument, so drop the flag after the first suffix.
            cmethod_flag = 0
    return result
+
def p_exception_value_clause(s):
    # Parse an optional 'except' clause of a C function declarator:
    #   'except *'        -> (None, 1)  always check for exceptions
    #   'except ? value'  -> (value, 1) value may be a false positive
    #   'except value'    -> (value, 0) value always means exception
    # Returns (exception_value, exception_check).
    exc_val = None
    exc_check = 0
    if s.sy == 'except':
        s.next()
        if s.sy == '*':
            exc_check = 1
            s.next()
        else:
            if s.sy == '?':
                exc_check = 1
                s.next()
            exc_val = p_exception_value(s)
    return exc_val, exc_check
+
def p_exception_value(s):
    # Parse the literal value of an 'except' clause: an optionally
    # negated int/long/float literal, or NULL.
    sign = ""
    if s.sy == "-":
        sign = "-"
        s.next()
    if s.sy in ('INT', 'LONG', 'FLOAT', 'NULL'):
        # Fold the sign into the token text so p_atom sees a single
        # signed literal.
        s.systring = sign + s.systring
        return p_atom(s)
    else:
        s.error("Exception value must be an int or float literal or NULL")
+
# Tokens that may terminate a C argument list ('...' starts with '.').
c_arg_list_terminators = ('*', '**', '.', ')')
# Tokens that may follow a comma at the end of the fixed arguments.
c_arg_list_trailers = ('.', '*', '**')
+
def p_c_arg_list(s, in_pyfunc, cmethod_flag = 0):
    # Parse a comma-separated list of C argument declarations,
    # stopping before '*', '**', '...' or ')'.
    args = []
    if s.sy not in c_arg_list_terminators:
        # cmethod_flag applies only to the first argument (the
        # implicit self of a C method).
        args.append(p_c_arg_decl(s, in_pyfunc, cmethod_flag))
        while s.sy == ',':
            s.next()
            if s.sy in c_arg_list_trailers:
                break
            args.append(p_c_arg_decl(s, in_pyfunc))
    return args
+
def p_optional_ellipsis(s):
    # Consume a '...' varargs marker if present; returns 1 if it was
    # there, else 0.
    if s.sy != '.':
        return 0
    expect_ellipsis(s)
    return 1
+
def p_c_arg_decl(s, in_pyfunc, cmethod_flag = 0):
    # Parse one argument declaration: base type, declarator, optional
    # 'not None' qualifier and optional '=' default value.
    pos = s.position()
    not_none = 0
    default = None
    base_type = p_c_base_type(s, cmethod_flag)
    declarator = p_c_declarator(s)
    if s.sy == 'not':
        s.next()
        # 'None' is an ordinary identifier to the scanner.
        if s.sy == 'IDENT' and s.systring == 'None':
            s.next()
        else:
            s.error("Expected 'None'")
        if not in_pyfunc:
            error(pos, "'not None' only allowed in Python functions")
        not_none = 1
    if s.sy == '=':
        s.next()
        default = p_simple_expr(s)
    return Nodes.CArgDeclNode(pos,
        base_type = base_type,
        declarator = declarator,
        not_none = not_none,
        default = default)
+
def p_cdef_statement(s, level, visibility = 'private'):
    # Parse the part of a cdef statement after 'cdef' has been
    # consumed: extern block, extension class, struct/union/enum,
    # 'pass', or a C function/variable declaration.
    pos = s.position()
    visibility = p_visibility(s, visibility)
    if visibility == 'extern' and s.sy in ('from' ,':'):
        return p_cdef_extern_block(s, level, pos)
    elif s.sy == 'class':
        if level not in ('module', 'module_pxd'):
            error(pos, "Extension type definition not allowed here")
        return p_c_class_definition(s, level, pos, visibility = visibility)
    elif s.sy == 'IDENT' and s.systring in struct_union_or_enum:
        if level not in ('module', 'module_pxd'):
            error(pos, "C struct/union/enum definition not allowed here")
        if visibility == 'public':
            error(pos, "Public struct/union/enum definition not implemented")
        if s.systring == "enum":
            return p_c_enum_definition(s, pos)
        else:
            return p_c_struct_or_union_definition(s, pos)
    elif s.sy == 'pass':
        node = p_pass_statement(s)
        s.expect_newline('Expected a newline')
        return node
    else:
        return p_c_func_or_var_declaration(s, level, pos, visibility)
+
def p_cdef_extern_block(s, level, pos):
    # Parse a 'cdef extern from <file>:' block. A '*' in place of the
    # file name means declarations with no associated header.
    s.expect('from')
    if s.sy == '*':
        include_file = None
        s.next()
    else:
        _, include_file = p_string_literal(s)
    block_body = p_suite(s, level, cdef_flag = 1, visibility = 'extern')
    return Nodes.CDefExternNode(pos,
        include_file = include_file,
        body = block_body)
+
# Identifiers that introduce a C aggregate or enum after 'cdef'.
struct_union_or_enum = (
    "struct", "union", "enum"
)
+
+def p_c_enum_definition(s, pos, typedef_flag = 0):
+ # s.sy == ident 'enum'
+ s.next()
+ if s.sy == 'IDENT':
+ name = s.systring
+ s.next()
+ s.add_type_name(name)
+ cname = p_opt_cname(s)
+ else:
+ name = None
+ cname = None
+ items = None
+ s.expect(':')
+ items = []
+ if s.sy <> 'NEWLINE':
+ p_c_enum_line(s, items)
+ else:
+ s.next() # 'NEWLINE'
+ s.expect_indent()
+ while s.sy not in ('DEDENT', 'EOF'):
+ p_c_enum_line(s, items)
+ s.expect_dedent()
+ return Nodes.CEnumDefNode(pos, name = name, cname = cname,
+ items = items, typedef_flag = typedef_flag)
+
def p_c_enum_line(s, items):
    # Parse one line of enum items (comma separated), or a bare
    # 'pass' line. Appends the parsed items to 'items' and consumes
    # the terminating newline.
    if s.sy <> 'pass':
        p_c_enum_item(s, items)
        while s.sy == ',':
            s.next()
            # Tolerate a trailing comma at end of line.
            if s.sy in ('NEWLINE', 'EOF'):
                break
            p_c_enum_item(s, items)
    else:
        s.next()
    s.expect_newline("Syntax error in enum item list")
+
def p_c_enum_item(s, items):
    # Parse one enum item: name [cname-literal] ['=' value] and
    # append the resulting node to 'items'.
    item_pos = s.position()
    item_name = p_ident(s)
    item_cname = p_opt_cname(s)
    if s.sy == '=':
        s.next()
        item_value = p_simple_expr(s)
    else:
        item_value = None
    items.append(Nodes.CEnumDefItemNode(item_pos,
        name = item_name, cname = item_cname, value = item_value))
+
def p_c_struct_or_union_definition(s, pos, typedef_flag = 0):
    # s.sy == ident 'struct' or 'union'
    # Parse a struct/union definition. With a ':' an indented block
    # of member declarations follows (attributes is a list); without
    # one it is a forward declaration (attributes stays None).
    kind = s.systring
    s.next()
    name = p_ident(s)
    cname = p_opt_cname(s)
    s.add_type_name(name)
    attributes = None
    if s.sy == ':':
        s.next()
        s.expect('NEWLINE')
        s.expect_indent()
        attributes = []
        while s.sy <> 'DEDENT':
            if s.sy <> 'pass':
                attributes.append(
                    p_c_func_or_var_declaration(s, level = 'other', pos = s.position()))
            else:
                # 'pass' lines are allowed as placeholders.
                s.next()
                s.expect_newline("Expected a newline")
        s.expect_dedent()
    else:
        s.expect_newline("Syntax error in struct or union definition")
    return Nodes.CStructOrUnionDefNode(pos,
        name = name, cname = cname, kind = kind, attributes = attributes,
        typedef_flag = typedef_flag)
+
def p_visibility(s, prev_visibility):
    # Parse an optional visibility keyword ('extern', 'public',
    # 'readonly'). Reports a conflict if a different non-default
    # visibility was already in force.
    pos = s.position()
    visibility = prev_visibility
    if s.sy == 'IDENT' and s.systring in ('extern', 'public', 'readonly'):
        visibility = s.systring
        if prev_visibility <> 'private' and visibility <> prev_visibility:
            s.error("Conflicting visibility options '%s' and '%s'"
                % (prev_visibility, visibility))
        s.next()
    return visibility
+
def p_c_func_or_var_declaration(s, level, pos, visibility = 'private'):
    # Parse either a C function definition (declarator followed by
    # ':' and a suite) or a C variable declaration (one or more
    # comma-separated declarators ending in a newline).
    cmethod_flag = level in ('c_class', 'c_class_pxd')
    base_type = p_c_base_type(s)
    declarator = p_c_declarator(s, cmethod_flag = cmethod_flag)
    if s.sy == ':':
        if level not in ('module', 'c_class'):
            s.error("C function definition not allowed here")
        suite = p_suite(s, 'function')
        result = Nodes.CFuncDefNode(pos,
            visibility = visibility,
            base_type = base_type,
            declarator = declarator,
            body = suite)
    else:
        if level == 'module_pxd' and visibility <> 'extern':
            error(pos,
                "Only 'extern' C function or variable declaration allowed in .pxd file")
        declarators = [declarator]
        while s.sy == ',':
            s.next()
            # Tolerate a trailing comma before the newline.
            if s.sy == 'NEWLINE':
                break
            declarator = p_c_declarator(s, cmethod_flag = cmethod_flag)
            declarators.append(declarator)
        s.expect_newline("Syntax error in C variable declaration")
        result = Nodes.CVarDefNode(pos,
            visibility = visibility,
            base_type = base_type,
            declarators = declarators)
    return result
+
def p_ctypedef_statement(s, level, visibility = 'private'):
    # s.sy == 'ctypedef'
    # Parse a ctypedef: of an extension class, a struct/union/enum,
    # or a plain base type + declarator.
    pos = s.position()
    s.next()
    visibility = p_visibility(s, visibility)
    if s.sy == 'class':
        return p_c_class_definition(s, level, pos,
            visibility = visibility,
            typedef_flag = 1)
    elif s.sy == 'IDENT' and s.systring in ('struct', 'union', 'enum'):
        if s.systring == 'enum':
            return p_c_enum_definition(s, pos, typedef_flag = 1)
        else:
            return p_c_struct_or_union_definition(s, pos, typedef_flag = 1)
    else:
        base_type = p_c_base_type(s)
        # is_type registers the declared name as a type name.
        declarator = p_c_declarator(s, is_type = 1)
        s.expect_newline("Syntax error in ctypedef statement")
        return Nodes.CTypeDefNode(pos,
            base_type = base_type, declarator = declarator)
+
def p_def_statement(s):
    # s.sy == 'def'
    # Parse a Python function definition, including optional
    # '*args' and '**kwargs' parameters.
    # Fixes: removed a stray semicolon after s.expect('(') and a dead
    # 'args = []' that was immediately overwritten.
    pos = s.position()
    s.next()
    name = p_ident(s)
    s.expect('(')
    args = p_c_arg_list(s, in_pyfunc = 1)
    star_arg = None
    starstar_arg = None
    if s.sy == '*':
        s.next()
        star_arg = p_py_arg_decl(s)
        # '**kwargs' may follow '*args' after a comma.
        if s.sy == ',':
            s.next()
            if s.sy == '**':
                s.next()
                starstar_arg = p_py_arg_decl(s)
    elif s.sy == '**':
        s.next()
        starstar_arg = p_py_arg_decl(s)
    s.expect(')')
    doc, body = p_suite(s, 'function', with_doc = 1)
    return Nodes.DefNode(pos, name = name, args = args,
        star_arg = star_arg, starstar_arg = starstar_arg,
        doc = doc, body = body)
+
def p_py_arg_decl(s):
    # Parse a bare Python argument name (used for *args / **kwargs).
    arg_pos = s.position()
    arg_name = p_ident(s)
    return Nodes.PyArgDeclNode(arg_pos, name = arg_name)
+
def p_class_statement(s):
    # s.sy == 'class'
    # Parse a Python class definition with an optional base list.
    pos = s.position()
    s.next()
    name = p_ident(s)
    base_list = []
    if s.sy == '(':
        s.next()
        base_list = p_simple_expr_list(s)
        s.expect(')')
    doc, body = p_suite(s, 'class', with_doc = 1)
    return Nodes.PyClassDefNode(pos,
        name = name,
        bases = ExprNodes.TupleNode(pos, args = base_list),
        doc = doc, body = body)
+
def p_c_class_definition(s, level, pos,
        visibility = 'private', typedef_flag = 0):
    # s.sy == 'class'
    # Parse an extension type (cdef class) definition: optionally
    # qualified name, optional 'as' alias, optional base class,
    # optional '[object ..., type ...]' name options and optional
    # body suite. Performs visibility-dependent validation.
    s.next()
    module_path = []
    class_name = p_ident(s)
    while s.sy == '.':
        s.next()
        module_path.append(class_name)
        class_name = p_ident(s)
    if module_path and visibility <> 'extern':
        error(pos, "Qualified class name only allowed for 'extern' C class")
    # 'as' is not a keyword to the scanner, hence the IDENT check.
    if module_path and s.sy == 'IDENT' and s.systring == 'as':
        s.next()
        as_name = p_ident(s)
    else:
        as_name = class_name
    s.add_type_name(as_name)
    objstruct_name = None
    typeobj_name = None
    base_class_module = None
    base_class_name = None
    if s.sy == '(':
        s.next()
        base_class_path = [p_ident(s)]
        while s.sy == '.':
            s.next()
            base_class_path.append(p_ident(s))
        if s.sy == ',':
            s.error("C class may only have one base class")
        s.expect(')')
        base_class_module = ".".join(base_class_path[:-1])
        base_class_name = base_class_path[-1]
    if s.sy == '[':
        if visibility not in ('public', 'extern'):
            error(s.position(), "Name options only allowed for 'public' or 'extern' C class")
        objstruct_name, typeobj_name = p_c_class_options(s)
    if s.sy == ':':
        if level == 'module_pxd':
            body_level = 'c_class_pxd'
        else:
            body_level = 'c_class'
        doc, body = p_suite(s, body_level, with_doc = 1)
    else:
        # Forward declaration: no body.
        s.expect_newline("Syntax error in C class definition")
        doc = None
        body = None
    if visibility == 'extern':
        if not module_path:
            error(pos, "Module name required for 'extern' C class")
        if typeobj_name:
            error(pos, "Type object name specification not allowed for 'extern' C class")
    elif visibility == 'public':
        if not objstruct_name:
            error(pos, "Object struct name specification required for 'public' C class")
        if not typeobj_name:
            error(pos, "Type object name specification required for 'public' C class")
    return Nodes.CClassDefNode(pos,
        visibility = visibility,
        typedef_flag = typedef_flag,
        module_name = ".".join(module_path),
        class_name = class_name,
        as_name = as_name,
        base_class_module = base_class_module,
        base_class_name = base_class_name,
        objstruct_name = objstruct_name,
        typeobj_name = typeobj_name,
        in_pxd = level == 'module_pxd',
        doc = doc,
        body = body)
+
def p_c_class_options(s):
    # Parse the '[object NAME, type NAME]' options of an extern or
    # public C class. Returns (objstruct_name, typeobj_name), either
    # of which may be None.
    objstruct_name = None
    typeobj_name = None
    s.expect('[')
    while 1:
        if s.sy <> 'IDENT':
            break
        if s.systring == 'object':
            s.next()
            objstruct_name = p_ident(s)
        elif s.systring == 'type':
            s.next()
            typeobj_name = p_ident(s)
        if s.sy <> ',':
            break
        s.next()
    s.expect(']', "Expected 'object' or 'type'")
    return objstruct_name, typeobj_name
+
def p_property_decl(s):
    # Parse a 'property NAME:' declaration inside an extension type.
    decl_pos = s.position()
    s.next() # 'property'
    prop_name = p_ident(s)
    prop_doc, prop_body = p_suite(s, 'property', with_doc = 1)
    return Nodes.PropertyNode(decl_pos,
        name = prop_name, doc = prop_doc, body = prop_body)
+
def p_doc_string(s):
    # Parse an optional doc string; returns its text or None if the
    # next token does not start a string literal.
    if s.sy not in ('STRING', 'BEGIN_STRING'):
        return None
    _, text = p_cat_string_literal(s)
    if s.sy != 'EOF':
        s.expect_newline("Syntax error in doc string")
    return text
+
def p_module(s, pxd):
    # Parse a whole module (or .pxd file if pxd is true) and return
    # its ModuleNode.
    # 'object' is predeclared as a type name.
    s.add_type_name("object")
    pos = s.position()
    doc = p_doc_string(s)
    if pxd:
        level = 'module_pxd'
    else:
        level = 'module'
    body = p_statement_list(s, level)
    if s.sy <> 'EOF':
        s.error("Syntax error in statement [%s,%s]" % (
            repr(s.sy), repr(s.systring)))
    return Nodes.ModuleNode(pos, doc = doc, body = body)
+
+#----------------------------------------------
+#
+# Debugging
+#
+#----------------------------------------------
+
def print_parse_tree(f, node, level, key = None):
    # Debugging helper: pretty-print a parse tree to file object f,
    # indented by 'level'. Handles tuple nodes, Node instances and
    # lists; anything else is printed with str().
    # 'key' labels the node with its attribute name in the parent.
    ind = "  " * level
    if node:
        f.write(ind)
        if key:
            f.write("%s: " % key)
        t = type(node)
        if t == TupleType:
            # Tuple nodes: (tag, pos, child, child, ...).
            f.write("(%s @ %s\n" % (node[0], node[1]))
            for i in xrange(2, len(node)):
                print_parse_tree(f, node[i], level+1)
            f.write("%s)\n" % ind)
            return
        elif isinstance(node, Node):
            try:
                tag = node.tag
            except AttributeError:
                tag = node.__class__.__name__
            f.write("%s @ %s\n" % (tag, node.pos))
            # Recurse over all attributes except the bookkeeping ones.
            for name, value in node.__dict__.items():
                if name <> 'tag' and name <> 'pos':
                    print_parse_tree(f, value, level+1, name)
            return
        elif t == ListType:
            f.write("[\n")
            for i in xrange(len(node)):
                print_parse_tree(f, node[i], level+1)
            f.write("%s]\n" % ind)
            return
    f.write("%s%s\n" % (ind, node))
+
--- /dev/null
+#
+# Pyrex - Types
+#
+
+import string
+import Naming
+
class PyrexType:
    #
    # Base class for all Pyrex types.
    #
    # is_pyobject           boolean     Is a Python object type
    # is_extension_type     boolean     Is a Python extension type
    # is_numeric            boolean     Is a C numeric type
    # is_int                boolean     Is a C integer type
    # is_float              boolean     Is a C floating point type
    # is_void               boolean     Is the C void type
    # is_array              boolean     Is a C array type
    # is_ptr                boolean     Is a C pointer type
    # is_null_ptr           boolean     Is the type of NULL
    # is_cfunction          boolean     Is a C function type
    # is_struct_or_union    boolean     Is a C struct or union type
    # is_enum               boolean     Is a C enum type
    # is_string             boolean     Is a C char * type
    # is_returncode         boolean     Is used only to signal exceptions
    # is_error              boolean     Is the dummy error type
    # has_attributes        boolean     Has C dot-selectable attributes
    # default_value         string      Initial value
    # parsetuple_format     string      Format char for PyArg_ParseTuple
    # pymemberdef_typecode  string      Type code for PyMemberDef struct
    #
    # declaration_code(entity_code,
    #     for_display = 0, dll_linkage = None, pyrex = 0)
    #   Returns a code fragment for the declaration of an entity
    #   of this type, given a code fragment for the entity.
    #   * If for_display, this is for reading by a human in an error
    #     message; otherwise it must be valid C code.
    #   * If dll_linkage is not None, it must be 'DL_EXPORT' or
    #     'DL_IMPORT', and will be added to the base type part of
    #     the declaration.
    #   * If pyrex = 1, this is for use in a 'cdef extern'
    #     statement of a Pyrex include file.
    #
    # assignable_from(src_type)
    #   Tests whether a variable of this type can be
    #   assigned a value of type src_type.
    #
    # same_as(other_type)
    #   Tests whether this type represents the same type
    #   as other_type.
    #
    # as_argument_type():
    #   Coerces array type into pointer type for use as
    #   a formal argument type.
    #

    is_pyobject = 0
    is_extension_type = 0
    is_numeric = 0
    is_int = 0
    is_float = 0
    is_void = 0
    is_array = 0
    is_ptr = 0
    is_null_ptr = 0
    is_cfunction = 0
    is_struct_or_union = 0
    is_enum = 0
    is_string = 0
    is_returncode = 0
    is_error = 0
    has_attributes = 0
    default_value = ""
    parsetuple_format = ""
    pymemberdef_typecode = None

    def resolve(self):
        # If a typedef, returns the base type.
        return self

    def literal_code(self, value):
        # Returns a C code fragment representing a literal
        # value of this type.
        return str(value)

    def __str__(self):
        # Fix: use the str.strip method instead of the dated
        # string.strip function (behaviour is identical).
        return self.declaration_code("", for_display = 1).strip()

    def same_as(self, other_type, **kwds):
        # Extra keyword args are forwarded so subclasses can refine
        # the comparison.
        return self.same_as_resolved_type(other_type.resolve(), **kwds)

    def same_as_resolved_type(self, other_type):
        # The error type compares equal to everything to suppress
        # cascading error messages.
        return self is other_type or other_type is error_type

    def subtype_of(self, other_type):
        return self.subtype_of_resolved_type(other_type.resolve())

    def subtype_of_resolved_type(self, other_type):
        return self.same_as(other_type)

    def assignable_from(self, src_type):
        return self.assignable_from_resolved_type(src_type.resolve())

    def assignable_from_resolved_type(self, src_type):
        return self.same_as(src_type)

    def as_argument_type(self):
        return self

    def is_complete(self):
        # A type is incomplete if it is an unsized array,
        # a struct whose attributes are not defined, etc.
        return 1

    def cast_code(self, expr_code):
        # C cast of expr_code to this type.
        return "((%s)%s)" % (self.declaration_code(""), expr_code)
+
+
class CTypedefType:
    #
    # Type defined with a ctypedef statement in a
    # 'cdef extern from' block. Wraps a base type, delegating every
    # attribute it does not define itself to that base type via
    # __getattr__, while declaring entities under the typedef's own
    # C name.
    #

    def __init__(self, cname, base_type):
        self.typedef_cname = cname
        self.typedef_base_type = base_type

    def resolve(self):
        # Resolving a typedef resolves its underlying base type.
        return self.typedef_base_type.resolve()

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        # Declarations use the typedef name, never the base type.
        return self.typedef_cname + " " + entity_code

    def __str__(self):
        return self.typedef_cname

    def __getattr__(self, name):
        # Anything else (flags, conversion functions, ...) comes from
        # the wrapped base type.
        return getattr(self.typedef_base_type, name)
+
+
class PyObjectType(PyrexType):
    #
    # The generic Python object type; base class for all
    # reference-counted (Python-level) types.
    #

    is_pyobject = 1
    default_value = "0"
    parsetuple_format = "O"
    pymemberdef_typecode = "T_OBJECT"

    def __str__(self):
        return "Python object"

    def __repr__(self):
        return "PyObjectType"

    def assignable_from(self, src_type):
        # Anything can be assigned to a Python object; a coercion
        # will be attempted at code-generation time.
        return 1

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        if pyrex:
            return "object " + entity_code
        return "%s *%s" % (public_decl("PyObject", dll_linkage), entity_code)
+
+
class PyExtensionType(PyObjectType):
    #
    # A Python extension type.
    #
    # name              string
    # scope             CClassScope  Attribute namespace
    # visibility        string
    # typedef_flag      boolean
    # base_type         PyExtensionType or None
    # module_name       string or None   Qualified name of defining module
    # objstruct_cname   string           Name of PyObject struct
    # typeobj_cname     string or None   C code fragment referring to type object
    # typeptr_cname     string or None   Name of pointer to external type object
    # vtabslot_cname    string           Name of C method table member
    # vtabstruct_cname  string           Name of C method table struct
    # vtabptr_cname     string           Name of pointer to C method table
    # vtable_cname      string           Name of C method table definition

    is_extension_type = 1
    has_attributes = 1

    def __init__(self, name, typedef_flag, base_type):
        self.name = name
        self.scope = None
        self.typedef_flag = typedef_flag
        self.base_type = base_type
        # All generated-name attributes start out unknown.
        for attr in ('module_name', 'objstruct_cname', 'typeobj_cname',
                'typeptr_cname', 'vtabslot_cname', 'vtabstruct_cname',
                'vtabptr_cname', 'vtable_cname'):
            setattr(self, attr, None)

    def set_scope(self, scope):
        self.scope = scope
        if scope:
            scope.parent_type = self

    def subtype_of_resolved_type(self, other_type):
        # An extension type is a subtype of itself, of any of its
        # ancestors, and of the generic Python object type.
        if not other_type.is_extension_type:
            return other_type is py_object_type
        if self is other_type:
            return 1
        return self.base_type and self.base_type.subtype_of(other_type)

    def typeobj_is_available(self):
        # Do we have a pointer to the type object?
        return self.typeptr_cname

    def typeobj_is_imported(self):
        # If we don't know the C name of the type object but we do
        # know which module it's defined in, it will be imported.
        return self.typeobj_cname is None and self.module_name is not None

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        if pyrex:
            return "%s %s" % (self.name, entity_code)
        if self.typedef_flag:
            struct_ref = self.objstruct_cname
        else:
            struct_ref = "struct " + self.objstruct_cname
        return "%s *%s" % (public_decl(struct_ref, dll_linkage), entity_code)

    def attributes_known(self):
        return self.scope is not None

    def __str__(self):
        return self.name

    def __repr__(self):
        if self.typedef_flag:
            suffix = ".typedef_flag=1"
        else:
            suffix = ""
        return "PyExtensionType(%s%s)" % (self.scope.class_name, suffix)
+
+
class CType(PyrexType):
    #
    # Base class for all C types (non-reference-counted).
    #
    # to_py_function     string     C function for converting to Python object
    # from_py_function   string     C function for constructing from Python object
    #
    # None means no conversion is available for this type.

    to_py_function = None
    from_py_function = None
+
+
class CSimpleType(CType):
    #
    # Base class for all unstructured C types (void, numeric, ...).
    #
    pass
+
+
class CVoidType(CSimpleType):
    # The C 'void' type. Never a complete object type, so it can
    # only appear behind a pointer or as a return type.

    is_void = 1

    def __repr__(self):
        return "<CVoidType>"

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        return "%s %s" % (public_decl("void", dll_linkage), entity_code)

    def is_complete(self):
        # void has no size.
        return 0
+
+
class CNumericType(CType):
    #
    # Base class for all C numeric types.
    #
    #   rank      integer     Relative size
    #   signed    boolean
    #

    is_numeric = 1
    default_value = "0"

    # PyArg_ParseTuple format character, looked up by [signed][rank];
    # '?' marks combinations with no usable format code.
    parsetuple_formats = ( # rank -> format
        "?HIkK???", # unsigned
        "chilLfd?", # signed
    )

    def __init__(self, rank, signed = 1, pymemberdef_typecode = None):
        self.rank = rank
        self.signed = signed
        ptf = self.parsetuple_formats[signed][rank]
        if ptf == '?':
            ptf = None
        self.parsetuple_format = ptf
        self.pymemberdef_typecode = pymemberdef_typecode

    def __repr__(self):
        # rank_to_type_name is a module-level table mapping rank to
        # the C type name.
        if self.signed:
            u = ""
        else:
            u = "unsigned "
        return "<CNumericType %s%s>" % (u, rank_to_type_name[self.rank])

    def assignable_from_resolved_type(self, src_type):
        # Numeric types convert freely among themselves.
        return src_type.is_numeric or src_type is error_type

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        if self.signed:
            u = ""
        else:
            u = "unsigned "
        base = public_decl(u + rank_to_type_name[self.rank], dll_linkage)
        return "%s %s" % (base, entity_code)

#    return "%s%s %s" % (u, rank_to_type_name[self.rank], entity_code)
+
+
class CIntType(CNumericType):
    # C integer type, convertible via the PyInt C API.

    is_int = 1
    typedef_flag = 0
    to_py_function = "PyInt_FromLong"
    from_py_function = "PyInt_AsLong"

    def __init__(self, rank, signed, pymemberdef_typecode = None, is_returncode = 0):
        CNumericType.__init__(self, rank, signed, pymemberdef_typecode)
        # is_returncode marks ints used only to signal exceptions.
        self.is_returncode = is_returncode
+
+
class CUIntType(CIntType):
    # Unsigned C int: converts through unsigned long.

    to_py_function = "PyLong_FromUnsignedLong"
    from_py_function = "PyInt_AsUnsignedLongMask"
+
+
class CULongType(CIntType):
    # Unsigned C long. NOTE(review): identical conversion functions
    # to CUIntType; kept as a distinct class, presumably so the two
    # ranks remain distinguishable.

    to_py_function = "PyLong_FromUnsignedLong"
    from_py_function = "PyInt_AsUnsignedLongMask"
+
+
class CLongLongType(CIntType):
    # Signed C long long.
    # NOTE(review): from_py uses the *unsigned* long-long mask
    # conversion even though this is the signed type -- looks
    # deliberate (masking semantics) but worth confirming.

    to_py_function = "PyLong_FromLongLong"
    from_py_function = "PyInt_AsUnsignedLongLongMask"
+
+
class CULongLongType(CIntType):
    # Unsigned C long long.

    to_py_function = "PyLong_FromUnsignedLongLong"
    from_py_function = "PyInt_AsUnsignedLongLongMask"
+
+
class CFloatType(CNumericType):
    # C floating point type (float/double by rank); always signed.

    is_float = 1
    to_py_function = "PyFloat_FromDouble"
    from_py_function = "PyFloat_AsDouble"

    def __init__(self, rank, pymemberdef_typecode = None):
        CNumericType.__init__(self, rank, 1, pymemberdef_typecode)
+
+
class CArrayType(CType):
    #  base_type     CType              Element type
    #  size          integer or None    Number of elements

    is_array = 1

    def __init__(self, base_type, size):
        self.base_type = base_type
        self.size = size
        if base_type is c_char_type:
            # A char array doubles as a C string.
            self.is_string = 1

    def __repr__(self):
        return "CArrayType(%s,%s)" % (self.size, repr(self.base_type))

    def same_as_resolved_type(self, other_type):
        if other_type is error_type:
            return 1
        if not other_type.is_array:
            return 0
        return self.base_type.same_as(other_type.base_type)

    def assignable_from_resolved_type(self, src_type):
        # Can't assign to a variable of an array type
        return 0

    def element_ptr_type(self):
        # Pointer type produced when the array decays.
        return c_ptr_type(self.base_type)

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        if self.size is None:
            dimension_code = ""
        else:
            dimension_code = self.size
        inner = "(%s[%s])" % (entity_code, dimension_code)
        return self.base_type.declaration_code(
            inner, for_display, dll_linkage, pyrex)

    def as_argument_type(self):
        # Arrays are passed as a pointer to their element type.
        return self.element_ptr_type()

    def is_complete(self):
        return self.size is not None
+
+
class CPtrType(CType):
    #  base_type     CType    Referenced type

    is_ptr = 1
    default_value = 0

    def __init__(self, base_type):
        self.base_type = base_type

    def __repr__(self):
        return "CPtrType(%s)" % repr(self.base_type)

    def same_as_resolved_type(self, other_type):
        if other_type is error_type:
            return 1
        if not other_type.is_ptr:
            return 0
        return self.base_type.same_as(other_type.base_type)

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        inner = "(*%s)" % entity_code
        return self.base_type.declaration_code(
            inner, for_display, dll_linkage, pyrex)

    def assignable_from_resolved_type(self, other_type):
        # The order of these tests matters: function-pointer
        # compatibility is decided before the generic pointer rules.
        if other_type is error_type:
            return 1
        elif self.base_type.is_cfunction and other_type.is_cfunction:
            return self.base_type.same_as(other_type)
        elif not other_type.is_ptr:
            return 0
        elif self.base_type.is_void:
            # void * accepts any pointer.
            return 1
        elif other_type.is_null_ptr:
            # NULL is assignable to any pointer.
            return 1
        else:
            return self.base_type.same_as(other_type.base_type)
+
+
class CNullPtrType(CPtrType):
    # Type of the NULL constant; assignable to any pointer type.

    is_null_ptr = 1
+
+
class CFuncType(CType):
    #  return_type      CType
    #  args             [CFuncTypeArg]
    #  has_varargs      boolean
    #  exception_value  string
    #  exception_check  boolean    True if PyErr_Occurred check needed
    #
    # Fixes: replaced the removed-in-Python-3 '<>' operator with '!=',
    # deprecated string.join() with str.join(), and the mutation of a
    # map() result (a list only under Python 2) with a list
    # comprehension. Behavior is unchanged.

    is_cfunction = 1

    def __init__(self, return_type, args, has_varargs,
            exception_value = None, exception_check = 0):
        self.return_type = return_type
        self.args = args
        self.has_varargs = has_varargs
        self.exception_value = exception_value
        self.exception_check = exception_check

    def __repr__(self):
        # Comprehension instead of map() so the result is a mutable
        # list under all Python versions.
        arg_reprs = [repr(arg) for arg in self.args]
        if self.has_varargs:
            arg_reprs.append("...")
        return "CFuncType(%s,[%s])" % (
            repr(self.return_type),
            ",".join(arg_reprs))

    def same_c_signature_as(self, other_type, as_cmethod = 0):
        return self.same_c_signature_as_resolved_type(
            other_type.resolve(), as_cmethod)

    def same_c_signature_as_resolved_type(self, other_type, as_cmethod):
        if other_type is error_type:
            return 1
        if not other_type.is_cfunction:
            return 0
        nargs = len(self.args)
        if nargs != len(other_type.args):
            return 0
        # When comparing C method signatures, the first argument
        # is exempt from compatibility checking (the proper check
        # is performed elsewhere).
        for i in range(as_cmethod, nargs):
            if not self.args[i].type.same_as(
                    other_type.args[i].type):
                return 0
        if self.has_varargs != other_type.has_varargs:
            return 0
        if not self.return_type.same_as(other_type.return_type):
            return 0
        return 1

    def same_exception_signature_as(self, other_type):
        return self.same_exception_signature_as_resolved_type(
            other_type.resolve())

    def same_exception_signature_as_resolved_type(self, other_type):
        return self.exception_value == other_type.exception_value \
            and self.exception_check == other_type.exception_check

    def same_as_resolved_type(self, other_type, as_cmethod = 0):
        return self.same_c_signature_as_resolved_type(other_type, as_cmethod) \
            and self.same_exception_signature_as_resolved_type(other_type)

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        arg_decl_list = []
        for arg in self.args:
            arg_decl_list.append(
                arg.type.declaration_code("", for_display, pyrex = pyrex))
        if self.has_varargs:
            arg_decl_list.append("...")
        arg_decl_code = ",".join(arg_decl_list)
        if not arg_decl_code and not pyrex:
            # In C, an empty parameter list means "unspecified".
            arg_decl_code = "void"
        exc_clause = ""
        if pyrex or for_display:
            # Reconstruct the Pyrex-level exception declaration.
            if self.exception_value and self.exception_check:
                exc_clause = " except? %s" % self.exception_value
            elif self.exception_value:
                exc_clause = " except %s" % self.exception_value
            elif self.exception_check:
                exc_clause = " except *"
        return self.return_type.declaration_code(
            "(%s(%s)%s)" % (entity_code, arg_decl_code, exc_clause),
            for_display, dll_linkage, pyrex)
+
+
class CFuncTypeArg:
    #  name       string
    #  cname      string
    #  type       PyrexType
    #  pos        source file position

    def __init__(self, name, type, pos):
        self.name = name
        # The C name is the Python name with the variable prefix
        # prepended, so argument names can't clash with internals.
        self.cname = Naming.var_prefix + name
        self.type = type
        self.pos = pos

    def __repr__(self):
        return "%s:%s" % (self.name, repr(self.type))

    def declaration_code(self, for_display = 0):
        return self.type.declaration_code(self.cname, for_display)
+
+
class CStructOrUnionType(CType):
    #  name          string
    #  cname         string
    #  kind          string              "struct" or "union"
    #  scope         StructOrUnionScope, or None if incomplete
    #  typedef_flag  boolean

    is_struct_or_union = 1
    has_attributes = 1

    def __init__(self, name, kind, scope, typedef_flag, cname):
        self.name = name
        self.cname = cname
        self.kind = kind
        self.scope = scope
        self.typedef_flag = typedef_flag

    def __repr__(self):
        if self.typedef_flag:
            flag = ",typedef_flag=1"
        else:
            flag = ""
        return "CStructOrUnionType(%s,%s%s)" % (self.name, self.cname, flag)

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        if pyrex:
            # Pyrex-level declarations use the Python-visible name.
            return "%s %s" % (self.name, entity_code)
        if for_display:
            base = self.name
        elif self.typedef_flag:
            # A ctypedef'ed struct is referred to by its bare cname.
            base = self.cname
        else:
            base = "%s %s" % (self.kind, self.cname)
        return "%s %s" % (public_decl(base, dll_linkage), entity_code)

    def is_complete(self):
        # Complete once a member scope has been attached.
        return self.scope is not None

    def attributes_known(self):
        return self.is_complete()
+
+
class CEnumType(CIntType):
    #  name          string
    #  cname         string or None
    #  typedef_flag  boolean

    is_enum = 1
    # Enums behave as signed ints (rank 2).
    signed = 1
    rank = 2

    def __init__(self, name, cname, typedef_flag):
        self.name = name
        self.cname = cname
        self.values = []
        self.typedef_flag = typedef_flag

    def __repr__(self):
        if self.typedef_flag:
            flag = ",typedef_flag=1"
        else:
            flag = ""
        return "CEnumType(%s,%s%s)" % (self.name, self.cname, flag)

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        if pyrex:
            return "%s %s" % (self.cname, entity_code)
        if self.typedef_flag:
            base = self.cname
        else:
            base = "enum %s" % self.cname
        return "%s %s" % (public_decl(base, dll_linkage), entity_code)
+
+
class CStringType:
    # Mixin class providing the shared behaviour of C string types
    # (char arrays and char pointers).

    is_string = 1

    to_py_function = "PyString_FromString"
    from_py_function = "PyString_AsString"

    def literal_code(self, value):
        # Render the value as a double-quoted C string literal.
        return '"%s"' % value
+
+
class CCharArrayType(CStringType, CArrayType):
    # C 'char []' type.

    parsetuple_format = "s"
    pymemberdef_typecode = "T_STRING_INPLACE"

    def __init__(self, size):
        # Element type is always c_char_type; only the size varies.
        CArrayType.__init__(self, c_char_type, size)
+
+
class CCharPtrType(CStringType, CPtrType):
    # C 'char *' type.

    parsetuple_format = "s"
    pymemberdef_typecode = "T_STRING"

    def __init__(self):
        CPtrType.__init__(self, c_char_type)
+
+
class ErrorType(PyrexType):
    # Sentinel type used to prevent propagation of error messages:
    # it compares equal to everything and converts both ways.

    is_error = 1
    exception_value = "0"
    exception_check = 0
    to_py_function = "dummy"
    from_py_function = "dummy"

    def same_as_resolved_type(self, other_type):
        # Equal to any type, so one error does not trigger a
        # cascade of follow-up messages.
        return 1

    def declaration_code(self, entity_code,
            for_display = 0, dll_linkage = None, pyrex = 0):
        # Only ever appears in diagnostics, never in generated C.
        return "<error>"
+
+
# Singleton instances for the fundamental Pyrex/C types. All type
# comparisons elsewhere rely on these being unique objects.

py_object_type = PyObjectType()

c_void_type = CVoidType()
c_void_ptr_type = CPtrType(c_void_type)
c_void_ptr_ptr_type = CPtrType(c_void_ptr_type)

# Signed integer types: (rank, signed, PyMemberDef typecode).
c_char_type = CIntType(0, 1, "T_CHAR")
c_short_type = CIntType(1, 1, "T_SHORT")
c_int_type = CIntType(2, 1, "T_INT")
c_long_type = CIntType(3, 1, "T_LONG")
c_longlong_type = CLongLongType(4, 1, "T_LONGLONG")

# Unsigned integer types.
c_uchar_type = CIntType(0, 0, "T_UBYTE")
c_ushort_type = CIntType(1, 0, "T_USHORT")
c_uint_type = CUIntType(2, 0, "T_UINT")
c_ulong_type = CULongType(3, 0, "T_ULONG")
c_ulonglong_type = CULongLongType(4, 0, "T_ULONGLONG")

# Floating point types (long double has no PyMemberDef typecode).
c_float_type = CFloatType(5, "T_FLOAT")
c_double_type = CFloatType(6, "T_DOUBLE")
c_longdouble_type = CFloatType(7)

c_null_ptr_type = CNullPtrType(c_void_type)
c_char_array_type = CCharArrayType(None)
c_char_ptr_type = CCharPtrType()
c_char_ptr_ptr_type = CPtrType(c_char_ptr_type)
c_int_ptr_type = CPtrType(c_int_type)

# Special int type for C functions whose return value signals an error.
c_returncode_type = CIntType(2, 1, "T_INT", is_returncode = 1)

error_type = ErrorType()

# Ranks below this are integer types, at or above are floating point.
lowest_float_rank = 5
+
# C type name for each rank; indexed by CNumericType.rank.
rank_to_type_name = (
    "char",          # 0
    "short",         # 1
    "int",           # 2
    "long",          # 3
    "PY_LONG_LONG",  # 4
    "float",         # 5
    "double",        # 6
    "long double",   # 7
)
+
# Maps (signed, rank) back to the corresponding singleton type object.
# Note: no unsigned entries exist for float ranks (5 and up).
sign_and_rank_to_type = {
    #(signed, rank)
    (0, 0, ): c_uchar_type,
    (0, 1): c_ushort_type,
    (0, 2): c_uint_type,
    (0, 3): c_ulong_type,
    (0, 4): c_ulonglong_type,
    (1, 0): c_char_type,
    (1, 1): c_short_type,
    (1, 2): c_int_type,
    (1, 3): c_long_type,
    (1, 4): c_longlong_type,
    (1, 5): c_float_type,
    (1, 6): c_double_type,
    (1, 7): c_longdouble_type,
}
+
# Maps a parsed (signed, longness, base-name) triple to a type object.
# longness: -1 = short, 0 = plain, 1 = long, 2 = long long.
modifiers_and_name_to_type = {
    #(signed, longness, name)
    (0, 0, "char"): c_uchar_type,
    (0, -1, "int"): c_ushort_type,
    (0, 0, "int"): c_uint_type,
    (0, 1, "int"): c_ulong_type,
    (0, 2, "int"): c_ulonglong_type,
    (1, 0, "void"): c_void_type,
    (1, 0, "char"): c_char_type,
    (1, -1, "int"): c_short_type,
    (1, 0, "int"): c_int_type,
    (1, 1, "int"): c_long_type,
    (1, 2, "int"): c_longlong_type,
    (1, 0, "float"): c_float_type,
    (1, 0, "double"): c_double_type,
    (1, 1, "double"): c_longdouble_type,
    (1, 0, "object"): py_object_type,
}
+
def widest_numeric_type(type1, type2):
    # Given two numeric types, return the smallest type
    # encompassing both of them.
    #
    # Fix: the result used to take its signedness from type1 alone,
    # so widest(uint, int) differed from widest(int, uint). Mirror
    # C's usual arithmetic conversions instead: the result is
    # unsigned if either operand is unsigned, and always signed at
    # floating point rank (there are no unsigned floats).
    signed = type1.signed and type2.signed
    rank = max(type1.rank, type2.rank)
    if rank >= lowest_float_rank:
        signed = 1
    return sign_and_rank_to_type[signed, rank]
+
def simple_c_type(signed, longness, name):
    # Find the type descriptor for a simple type given its base name
    # and sign/longness modifiers. Returns None if the combination
    # doesn't make sense.
    key = (signed, longness, name)
    return modifiers_and_name_to_type.get(key)
+
def c_array_type(base_type, size):
    # Construct a C array type, using the string-aware subclass
    # for char arrays.
    if base_type is not c_char_type:
        return CArrayType(base_type, size)
    return CCharArrayType(size)
+
def c_ptr_type(base_type):
    # Construct a C pointer type. char* is a shared singleton with
    # string behaviour; everything else gets a fresh CPtrType.
    if base_type is not c_char_type:
        return CPtrType(base_type)
    return c_char_ptr_type
+
def public_decl(base, dll_linkage):
    # Wrap a base declaration in a DLL import/export macro when one
    # is in effect; otherwise return it unchanged.
    if not dll_linkage:
        return base
    return "%s(%s)" % (dll_linkage, base)
+
def same_type(type1, type2):
    # Module-level convenience wrapper for type equality.
    return type1.same_as(type2)

def assignable_from(type1, type2):
    # True if a value of type2 may be assigned to a slot of type1.
    return type1.assignable_from(type2)
+
def typecast(to_type, from_type, expr_code):
    # Return expr_code cast to a C type which can be assigned to
    # to_type, assuming its existing C type is from_type. No cast
    # is emitted when the types already agree, or when a plain C
    # assignment between them is legal.
    if to_type is from_type:
        return expr_code
    if not to_type.is_pyobject and assignable_from(to_type, from_type):
        return expr_code
    return to_type.cast_code(expr_code)
--- /dev/null
+#
+# Pyrex Scanner
+#
+
+#import pickle
+import cPickle as pickle
+
+import os
+import stat
+import sys
+from time import time
+
+from Pyrex import Plex
+from Pyrex.Plex import Scanner
+from Pyrex.Plex.Errors import UnrecognizedInput
+from Errors import CompileError, error
+from Lexicon import string_prefixes, make_lexicon
+
# Set only in development versions of Plex; a released Plex has no
# _version attribute, which enables the lexicon pickle cache below.
plex_version = getattr(Plex, '_version', None)
#print "Plex version:", plex_version ###

# Debug/trace switches (0 = off).
debug_scanner = 0
trace_scanner = 0
scanner_debug_flags = 0
scanner_dump_file = None
# Pickle protocol flag and progress-notification switches for the
# lexicon cache.
binary_lexicon_pickle = 1
notify_lexicon_unpickling = 0
notify_lexicon_pickling = 1

# Lazily-built lexicon shared by all PyrexScanner instances.
lexicon = None
+
+#-----------------------------------------------------------------
+
+def hash_source_file(path):
+ # Try to calculate a hash code for the given source file.
+ # Returns an empty string if the file cannot be accessed.
+ #print "Hashing", path ###
+ import md5
+ try:
+ try:
+ f = open(path, "rU")
+ text = f.read()
+ except IOError, e:
+ print "Unable to hash scanner source file (%s)" % e
+ return ""
+ finally:
+ f.close()
+ # Normalise spaces/tabs. We don't know what sort of
+ # space-tab substitution the file may have been
+ # through, so we replace all spans of spaces and
+ # tabs by a single space.
+ import re
+ text = re.sub("[ \t]+", " ", text)
+ hash = md5.new(text).hexdigest()
+ return hash
+
def open_pickled_lexicon(expected_hash):
    # Try to open pickled lexicon file and verify that
    # it matches the source file. Returns the opened
    # file (positioned just past the stored hash) if
    # successful, otherwise None.
    f = None
    result = None
    if os.path.exists(lexicon_pickle):
        try:
            f = open(lexicon_pickle, "rb")
            actual_hash = pickle.load(f)
            if actual_hash == expected_hash:
                # Hand the open file to the caller; clearing f
                # prevents the close below.
                result = f
                f = None
            else:
                print "Lexicon hash mismatch:" ###
                print " expected", expected_hash ###
                print " got ", actual_hash ###
        except IOError, e:
            print "Warning: Unable to read pickled lexicon", lexicon_pickle
            print e
    if f:
        f.close()
    return result
+
def try_to_unpickle_lexicon():
    # Attempt to load a previously pickled lexicon whose recorded
    # hash matches the current Lexicon.py. On success sets the
    # module-level 'lexicon'; always records 'lexicon_pickle' and
    # 'lexicon_hash' for a later pickle_lexicon() call.
    global lexicon, lexicon_pickle, lexicon_hash
    dir = os.path.dirname(__file__)
    source_file = os.path.join(dir, "Lexicon.py")
    lexicon_hash = hash_source_file(source_file)
    lexicon_pickle = os.path.join(dir, "Lexicon.pickle")
    f = open_pickled_lexicon(expected_hash = lexicon_hash)
    if f:
        if notify_lexicon_unpickling:
            t0 = time()
            print "Unpickling lexicon..."
        lexicon = pickle.load(f)
        f.close()
        if notify_lexicon_unpickling:
            t1 = time()
            print "Done (%.2f seconds)" % (t1 - t0)
+
def create_new_lexicon():
    # Build the lexicon from scratch (slow) and install it in the
    # module-level 'lexicon', reporting the elapsed time.
    global lexicon
    t0 = time()
    print "Creating lexicon..."
    lexicon = make_lexicon()
    t1 = time()
    print "Done (%.2f seconds)" % (t1 - t0)
+
def pickle_lexicon():
    # Save the module-level lexicon to the pickle cache, storing the
    # source hash first so a stale cache can be detected on load.
    # Failure to open the cache file is non-fatal.
    # NOTE(review): the file is not closed if pickle.dump raises —
    # consider a try/finally; also assumes lexicon_pickle/lexicon_hash
    # were set by try_to_unpickle_lexicon().
    f = None
    try:
        f = open(lexicon_pickle, "wb")
    except IOError:
        print "Warning: Unable to save pickled lexicon in", lexicon_pickle
    if f:
        if notify_lexicon_pickling:
            t0 = time()
            print "Pickling lexicon..."
        pickle.dump(lexicon_hash, f, binary_lexicon_pickle)
        pickle.dump(lexicon, f, binary_lexicon_pickle)
        f.close()
        if notify_lexicon_pickling:
            t1 = time()
            print "Done (%.2f seconds)" % (t1 - t0)
+
def get_lexicon():
    # Return the shared lexicon, creating it on first use.
    # With a released Plex (plex_version is None) the lexicon is
    # cached on disk via pickle; a development Plex always rebuilds
    # it and never pickles.
    global lexicon
    if not lexicon and plex_version is None:
        try_to_unpickle_lexicon()
    if not lexicon:
        create_new_lexicon()
        if plex_version is None:
            pickle_lexicon()
    return lexicon
+
+#------------------------------------------------------------------
+
# Words the scanner reports as their own token kinds rather than as
# IDENT. Fix: "in" was listed twice; the duplicate has been removed
# (membership behaviour is unchanged since lookups go through a dict).
reserved_words = [
    "global", "include", "ctypedef", "cdef", "def", "class",
    "print", "del", "pass", "break", "continue", "return",
    "raise", "import", "exec", "try", "except", "finally",
    "while", "if", "elif", "else", "for", "in", "assert",
    "and", "or", "not", "is", "lambda", "from",
    "NULL", "cimport"
]
+
class Method:
    # Wraps a method name so it can be used as a Plex action:
    # invoking the wrapper calls the named method on the token
    # stream it is given.

    def __init__(self, name):
        self.name = name
        # Plex consults __name__ when tracing actions.
        self.__name__ = name

    def __call__(self, stream, text):
        bound = getattr(stream, self.name)
        return bound(text)
+
+#------------------------------------------------------------------
+
def build_resword_dict():
    # Build a set-like dict mapping each reserved word to 1, for
    # fast membership tests in the scanner.
    return dict.fromkeys(reserved_words, 1)
+
+#------------------------------------------------------------------
+
class PyrexScanner(Scanner):
    # Tokenizer for Pyrex source. Extends the Plex Scanner with
    # Python-style INDENT/DEDENT generation, string-literal state
    # switching, reserved-word recognition and a one-token lookahead
    # interface (self.sy / self.systring) used by the parser.

    # Set-like dict of reserved words, shared by all instances.
    resword_dict = build_resword_dict()

    def __init__(self, file, filename, parent_scanner = None,
            type_names = None, context = None):
        # A scanner created for an included file inherits the
        # compilation context and known type names from its parent.
        Scanner.__init__(self, get_lexicon(), file, filename)
        if parent_scanner:
            self.context = parent_scanner.context
            self.type_names = parent_scanner.type_names
        else:
            self.context = context
            self.type_names = type_names
        self.trace = trace_scanner
        self.indentation_stack = [0]    # open indentation levels
        self.indentation_char = None    # ' ' or '\t' once committed
        self.bracket_nesting_level = 0  # depth of (), [], {} nesting
        self.begin('INDENT')            # start by measuring indentation
        self.sy = ''
        self.next()                     # prime the lookahead token

    def current_level(self):
        # Innermost (current) indentation level.
        return self.indentation_stack[-1]

    def open_bracket_action(self, text):
        self.bracket_nesting_level = self.bracket_nesting_level + 1
        return text

    def close_bracket_action(self, text):
        self.bracket_nesting_level = self.bracket_nesting_level - 1
        return text

    def newline_action(self, text):
        # Newlines are significant only outside brackets; within
        # them lines are joined implicitly.
        if self.bracket_nesting_level == 0:
            self.begin('INDENT')
            self.produce('NEWLINE', '')

    # Maps an opening quote (after any prefix letter has been
    # stripped) to the lexicon state scanning that string body.
    string_states = {
        "'": 'SQ_STRING',
        '"': 'DQ_STRING',
        "'''": 'TSQ_STRING',
        '"""': 'TDQ_STRING'
    }

    def begin_string_action(self, text):
        # Strip a single string-prefix character (e.g. r) if present.
        if text[:1] in string_prefixes:
            text = text[1:]
        self.begin(self.string_states[text])
        self.produce('BEGIN_STRING')

    def end_string_action(self, text):
        self.begin('')
        self.produce('END_STRING')

    def unclosed_string_action(self, text):
        # Terminate the string state first so scanning can continue.
        self.end_string_action(text)
        self.error("Unclosed string literal")

    def indentation_action(self, text):
        # Called with the leading whitespace of a logical line;
        # emits INDENT/DEDENT tokens as the level changes.
        self.begin('')
        # Indentation within brackets should be ignored.
        #if self.bracket_nesting_level > 0:
        #    return
        # Check that tabs and spaces are being used consistently.
        if text:
            c = text[0]
            #print "Scanner.indentation_action: indent with", repr(c) ###
            if self.indentation_char is None:
                # First indented line fixes the indentation character.
                self.indentation_char = c
                #print "Scanner.indentation_action: setting indent_char to", repr(c)
            else:
                if self.indentation_char <> c:
                    self.error("Mixed use of tabs and spaces")
            if text.replace(c, "") <> "":
                self.error("Mixed use of tabs and spaces")
        # Figure out how many indents/dedents to do
        current_level = self.current_level()
        new_level = len(text)
        #print "Changing indent level from", current_level, "to", new_level ###
        if new_level == current_level:
            return
        elif new_level > current_level:
            #print "...pushing level", new_level ###
            self.indentation_stack.append(new_level)
            self.produce('INDENT', '')
        else:
            # Pop levels until we are back at (or below) the new one.
            while new_level < self.current_level():
                #print "...popping level", self.indentation_stack[-1] ###
                self.indentation_stack.pop()
                self.produce('DEDENT', '')
            #print "...current level now", self.current_level() ###
            if new_level <> self.current_level():
                self.error("Inconsistent indentation")

    def eof_action(self, text):
        # Close any still-open indentation levels, then emit EOF.
        while len(self.indentation_stack) > 1:
            self.produce('DEDENT', '')
            self.indentation_stack.pop()
        self.produce('EOF', '')

    def next(self):
        # Advance the lookahead: leaves the next token kind in
        # self.sy and its text in self.systring.
        try:
            sy, systring = self.read()
        except UnrecognizedInput:
            self.error("Unrecognized character")
        if sy == 'IDENT' and systring in self.resword_dict:
            # Reserved words become their own token kinds.
            sy = systring
        self.sy = sy
        self.systring = systring
        if debug_scanner:
            _, line, col = self.position()
            if not self.systring or self.sy == self.systring:
                t = self.sy
            else:
                t = "%s %s" % (self.sy, self.systring)
            print "--- %3d %2d %s" % (line, col, t)

    def put_back(self, sy, systring):
        # Push the current lookahead token back and replace it.
        self.unread(self.sy, self.systring)
        self.sy = sy
        self.systring = systring

    def unread(self, token, value):
        # This method should be added to Plex
        self.queue.insert(0, (token, value))

    def add_type_name(self, name):
        # Record a name as a type so the parser can disambiguate.
        self.type_names[name] = 1

    def looking_at_type_name(self):
        return self.sy == 'IDENT' and self.systring in self.type_names

    def error(self, message, pos = None):
        if pos is None:
            pos = self.position()
        if self.sy == 'INDENT':
            error(pos, "Possible inconsistent indentation")
        # NOTE(review): relies on the module-level error() returning
        # an exception instance suitable for raise — confirm.
        raise error(pos, message)

    def expect(self, what, message = None):
        # Consume the expected token kind or report an error.
        if self.sy == what:
            self.next()
        else:
            if message:
                self.error(message)
            else:
                self.error("Expected '%s'" % what)

    def expect_indent(self):
        self.expect('INDENT',
            "Expected an increase in indentation level")

    def expect_dedent(self):
        self.expect('DEDENT',
            "Expected a decrease in indentation level")

    def expect_newline(self, message):
        # Expect either a newline or end of file
        if self.sy <> 'EOF':
            self.expect('NEWLINE', message)
--- /dev/null
+#
+# Pyrex - Symbol Table
+#
+
+import re
+from Errors import error, InternalError
+import Options
+import Naming
+from PyrexTypes import c_int_type, \
+ py_object_type, c_char_array_type, \
+ CEnumType, CStructOrUnionType, PyExtensionType
+from TypeSlots import \
+ pyfunction_signature, pymethod_signature, \
+ get_special_method_signature, get_property_accessor_signature
+
+identifier_pattern = re.compile(r"[A-Za-z_][A-Za-z0-9_]*$")
+
class Entry:
    # A symbol table entry in a Scope or ModuleNamespace.
    #
    # Most attributes have class-level defaults below and are only
    # overridden on the instance when relevant to the entry's kind.
    #
    # name             string     Python name of entity
    # cname            string     C name of entity
    # type             PyrexType  Type of entity
    # doc              string     Doc string
    # init             string     Initial value
    # visibility       'private' or 'public' or 'extern'
    # is_builtin       boolean    Is a Python builtin name
    # is_cglobal       boolean    Is a C global variable
    # is_pyglobal      boolean    Is a Python module-level variable
    #                               or class attribute during
    #                               class construction
    # is_variable      boolean    Is a variable
    # is_cfunction     boolean    Is a C function
    # is_cmethod       boolean    Is a C method of an extension type
    # is_type          boolean    Is a type definition
    # is_const         boolean    Is a constant
    # is_property      boolean    Is a property of an extension type:
    # doc_cname        string or None  C const holding the docstring
    # getter_cname     string     C func for getting property
    # setter_cname     string     C func for setting or deleting property
    # is_self_arg      boolean    Is the "self" arg of an exttype method
    # is_readonly      boolean    Can't be assigned to
    # func_cname       string     C func implementing Python func
    # pos              position   Source position where declared
    # namespace_cname  string     If is_pyglobal, the C variable
    #                               holding its home namespace
    # pymethdef_cname  string     PyMethodDef structure
    # signature        Signature  Arg & return types for Python func
    # init_to_none     boolean    True if initial value should be None
    # as_variable      Entry      Alternative interpretation of extension
    #                               type name as a variable
    # xdecref_cleanup  boolean    Use Py_XDECREF for error cleanup
    # in_cinclude      boolean    Suppress C declaration code
    # enum_values      [Entry]    For enum types, list of values
    # qualified_name   string     "modname.funcname" or "modname.classname"
    #                               or "modname.classname.funcname"
    # is_declared_generic  boolean  Is declared as PyObject * even though its
    #                                 type is an extension type
    # as_module        None       Module scope, if a cimported module
    # is_inherited     boolean    Is an inherited attribute of an extension type
    # interned_cname   string     C name of interned name string
    # pystring_cname   string     C name of Python version of string literal
    # is_interned      boolean    For string const entries, value is interned
    # borrowed         —          not listed above; presumably a
    #                               borrowed-reference flag — TODO confirm

    borrowed = 0
    init = ""
    visibility = 'private'
    is_builtin = 0
    is_cglobal = 0
    is_pyglobal = 0
    is_variable = 0
    is_cfunction = 0
    is_cmethod = 0
    is_type = 0
    is_const = 0
    is_property = 0
    doc_cname = None
    getter_cname = None
    setter_cname = None
    is_self_arg = 0
    is_declared_generic = 0
    is_readonly = 0
    func_cname = None
    doc = None
    init_to_none = 0
    as_variable = None
    xdecref_cleanup = 0
    in_cinclude = 0
    as_module = None
    is_inherited = 0
    interned_cname = None
    pystring_cname = None
    is_interned = 0

    def __init__(self, name, cname, type, pos = None, init = None):
        self.name = name
        self.cname = cname
        self.type = type
        self.pos = pos
        self.init = init
+
+
class Scope:
    # Base class for all symbol table scopes (module, class,
    # function, struct, ...).
    #
    # name              string             Unqualified name
    # outer_scope       Scope or None      Enclosing scope
    # entries           {string : Entry}   Python name to entry, non-types
    # const_entries     [Entry]            Constant entries
    # sue_entries       [Entry]            Struct/union/enum entries
    # arg_entries       [Entry]            Function argument entries
    # var_entries       [Entry]            User-defined variable entries
    # pyfunc_entries    [Entry]            Python function entries
    # cfunc_entries     [Entry]            C function entries
    # c_class_entries   [Entry]            All extension type entries
    # temp_entries      [Entry]            Temporary variable entries
    # free_temp_entries [Entry]            Temp variables currently unused
    # temp_counter      integer            Counter for naming temp vars
    # cname_to_entry    {string : Entry}   Temp cname to entry mapping
    # pow_function_used boolean            The C pow() function is used
    # return_type       PyrexType or None  Return type of function owning scope
    # is_py_class_scope boolean            Is a Python class scope
    # is_c_class_scope  boolean            Is an extension type scope
    # scope_prefix      string             Disambiguator for C names
    # in_cinclude       boolean            Suppress C declaration code
    # qualified_name    string             "modname" or "modname.classname"
    # pystring_entries  [Entry]            String const entries newly used as
    #                                        Python strings in this scope

    is_py_class_scope = 0
    is_c_class_scope = 0
    scope_prefix = ""
    in_cinclude = 0
+
+ def __init__(self, name, outer_scope, parent_scope):
+ # The outer_scope is the next scope in the lookup chain.
+ # The parent_scope is used to derive the qualified name of this scope.
+ self.name = name
+ self.outer_scope = outer_scope
+ self.parent_scope = parent_scope
+ mangled_name = "%d%s_" % (len(name), name)
+ qual_scope = self.qualifying_scope()
+ if qual_scope:
+ self.qualified_name = qual_scope.qualify_name(name)
+ self.scope_prefix = qual_scope.scope_prefix + mangled_name
+ else:
+ self.qualified_name = name
+ self.scope_prefix = mangled_name
+ self.entries = {}
+ self.const_entries = []
+ self.sue_entries = []
+ self.arg_entries = []
+ self.var_entries = []
+ self.pyfunc_entries = []
+ self.cfunc_entries = []
+ self.c_class_entries = []
+ self.defined_c_classes = []
+ self.imported_c_classes = {}
+ self.temp_entries = []
+ self.free_temp_entries = []
+ #self.pending_temp_entries = [] # TEMPORARY
+ self.temp_counter = 1
+ self.cname_to_entry = {}
+ self.pow_function_used = 0
+ self.string_to_entry = {}
+ self.pystring_entries = []
+
    def __str__(self):
        # e.g. "<ModuleScope modname.classname>"
        return "<%s %s>" % (self.__class__.__name__, self.qualified_name)

    def intern(self, name):
        # Interning is handled centrally by the module scope.
        return self.global_scope().intern(name)

    def qualifying_scope(self):
        # Scope used to build this scope's qualified name.
        return self.parent_scope
+
+ def mangle(self, prefix, name = None):
+ if name:
+ return "%s%s%s" % (prefix, self.scope_prefix, name)
+ else:
+ return self.parent_scope.mangle(prefix, self.name)
+
    def mangle_internal(self, name):
        # Mangle an internal name so as not to clash with any
        # user-defined name in this scope.
        prefix = "%s%s_" % (Naming.pyrex_prefix, name)
        return self.mangle(prefix)
        #return self.parent_scope.mangle(prefix, self.name)

    def global_scope(self):
        # Return the module-level scope containing this scope.
        return self.outer_scope.global_scope()
+
+ def declare(self, name, cname, type, pos):
+ # Create new entry, and add to dictionary if
+ # name is not None. Reports an error if already
+ # declared.
+ dict = self.entries
+ if name and dict.has_key(name):
+ error(pos, "'%s' redeclared" % name)
+ entry = Entry(name, cname, type, pos = pos)
+ entry.in_cinclude = self.in_cinclude
+ if name:
+ entry.qualified_name = self.qualify_name(name)
+ dict[name] = entry
+ return entry
+
    def qualify_name(self, name):
        # Dotted name of 'name' within this scope.
        return "%s.%s" % (self.qualified_name, name)
+
    def declare_const(self, name, type, value, pos, cname = None):
        # Add an entry for a named constant.
        if not cname:
            if self.in_cinclude:
                # Declared in an external header: keep the C name as-is.
                cname = name
            else:
                cname = self.mangle(Naming.enum_prefix, name)
        entry = self.declare(name, cname, type, pos)
        entry.is_const = 1
        entry.value = value
        return entry
+
    def declare_type(self, name, type, pos,
            cname = None, visibility = 'private'):
        # Add an entry for a type definition.
        if not cname:
            cname = name
        entry = self.declare(name, cname, type, pos)
        entry.visibility = visibility
        entry.is_type = 1
        return entry
+
    def declare_struct_or_union(self, name, kind, scope,
            typedef_flag, pos, cname = None):
        # Add an entry for a struct or union definition. A forward
        # declaration (scope is None) may later be completed by a
        # second declaration supplying the member scope.
        if not cname:
            if self.in_cinclude:
                cname = name
            else:
                cname = self.mangle(Naming.type_prefix, name)
        entry = self.lookup_here(name)
        if not entry:
            # First declaration of this name.
            type = CStructOrUnionType(name, kind, scope, typedef_flag, cname)
            entry = self.declare_type(name, type, pos, cname)
            self.sue_entries.append(entry)
        else:
            # Redeclaration: only legal if it completes (or repeats)
            # a compatible earlier struct/union declaration.
            if not (entry.is_type and entry.type.is_struct_or_union):
                error(pos, "'%s' redeclared" % name)
            elif scope and entry.type.scope:
                error(pos, "'%s' already defined" % name)
            else:
                self.check_previous_typedef_flag(entry, typedef_flag, pos)
                if scope:
                    entry.type.scope = scope
        if not scope and not entry.type.scope:
            # Still incomplete after this declaration.
            self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
        return entry
+
+ def check_previous_typedef_flag(self, entry, typedef_flag, pos):
+ if typedef_flag <> entry.type.typedef_flag:
+ error(pos, "'%s' previously declared using '%s'" % (
+ entry.name, ("cdef", "ctypedef")[entry.type.typedef_flag]))
+
+ def declare_enum(self, name, pos, cname, typedef_flag):
+ if name:
+ if not cname:
+ if self.in_cinclude:
+ cname = name
+ else:
+ cname = self.mangle(Naming.type_prefix, name)
+ type = CEnumType(name, cname, typedef_flag)
+ else:
+ type = c_int_type
+ entry = self.declare_type(name, type, pos, cname = cname)
+ entry.enum_values = []
+ self.sue_entries.append(entry)
+ return entry
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', is_cdef = 0):
+ # Add an entry for a variable.
+ if not cname:
+ if visibility <> 'private':
+ cname = name
+ else:
+ cname = self.mangle(Naming.var_prefix, name)
+ entry = self.declare(name, cname, type, pos)
+ entry.is_variable = 1
+ entry.visibility = visibility
+ return entry
+
    def declare_builtin(self, name, pos):
        # Builtins are declared by the module scope; delegate outward.
        return self.outer_scope.declare_builtin(name, pos)

    def declare_pyfunction(self, name, pos):
        # Add an entry for a Python function.
        entry = self.declare_var(name, py_object_type, pos)
        entry.signature = pyfunction_signature
        self.pyfunc_entries.append(entry)
        return entry

    def register_pyfunction(self, entry):
        # Record an externally created Python function entry.
        self.pyfunc_entries.append(entry)
+
+ def declare_cfunction(self, name, type, pos,
+ cname = None, visibility = 'private', defining = 0):
+ # Add an entry for a C function.
+ if not cname:
+ if visibility <> 'private':
+ cname = name
+ else:
+ cname = self.mangle(Naming.func_prefix, name)
+ entry = self.add_cfunction(name, type, pos, cname, visibility)
+ entry.func_cname = cname
+ return entry
+
+ def add_cfunction(self, name, type, pos, cname, visibility):
+ # Add a C function entry without giving it a func_cname.
+ entry = self.declare(name, cname, type, pos)
+ entry.is_cfunction = 1
+ entry.visibility = visibility
+ self.cfunc_entries.append(entry)
+ return entry
+
+ def find(self, name, pos):
+ # Look up name, report error if not found.
+ entry = self.lookup(name)
+ if entry:
+ return entry
+ else:
+ error(pos, "'%s' is not declared" % name)
+
+ def lookup(self, name):
+ # Look up name in this scope or an enclosing one.
+ # Return None if not found.
+ return (self.lookup_here(name)
+ or (self.outer_scope and self.outer_scope.lookup(name))
+ or None)
+
+ def lookup_here(self, name):
+ # Look up in this scope only, return None if not found.
+ return self.entries.get(name, None)
+
+ def lookup_target(self, name):
+ # Look up name in this scope only. Declare as Python
+ # variable if not found.
+ entry = self.lookup_here(name)
+ if not entry:
+ entry = self.declare_var(name, py_object_type, None)
+ return entry
+
+ def add_string_const(self, value):
+ # Add an entry for a string constant.
+ cname = self.new_const_cname()
+ entry = Entry("", cname, c_char_array_type, init = value)
+ self.const_entries.append(entry)
+ return entry
+
+ def get_string_const(self, value):
+ # Get entry for string constant. Returns an existing
+ # one if possible, otherwise creates a new one.
+ genv = self.global_scope()
+ entry = genv.string_to_entry.get(value)
+ if not entry:
+ entry = self.add_string_const(value)
+ genv.string_to_entry[value] = entry
+ return entry
+
+ def add_py_string(self, entry):
+ # If not already done, allocate a C name for a Python version of
+ # a string literal, and add it to the list of Python strings to
+ # be created at module init time. If the string resembles a
+ # Python identifier, it will be interned.
+ if not entry.pystring_cname:
+ value = entry.init
+ if identifier_pattern.match(value):
+ entry.pystring_cname = self.intern(value)
+ entry.is_interned = 1
+ else:
+ entry.pystring_cname = entry.cname + "p"
+ self.pystring_entries.append(entry)
+ self.global_scope().all_pystring_entries.append(entry)
+
+ def new_const_cname(self):
+ # Create a new globally-unique name for a constant.
+ return self.global_scope().new_const_cname()
+
+ def allocate_temp(self, type):
+ # Allocate a temporary variable of the given type from the
+ # free list if available, otherwise create a new one.
+ # Returns the cname of the variable.
+ for entry in self.free_temp_entries:
+ if entry.type == type:
+ self.free_temp_entries.remove(entry)
+ return entry.cname
+ n = self.temp_counter
+ self.temp_counter = n + 1
+ cname = "%s%d" % (Naming.pyrex_prefix, n)
+ entry = Entry("", cname, type)
+ if type.is_pyobject:
+ entry.init = "0"
+ self.cname_to_entry[entry.cname] = entry
+ self.temp_entries.append(entry)
+ return entry.cname
+
    def allocate_temp_pyobject(self):
        # Convenience wrapper: allocate a temporary variable of the
        # generic Python object type and return its cname.
        return self.allocate_temp(py_object_type)
+
+ def release_temp(self, cname):
+ # Release a temporary variable for re-use.
+ if not cname: # can happen when type of an expr is void
+ return
+ entry = self.cname_to_entry[cname]
+ if entry in self.free_temp_entries:
+ raise InternalError("Temporary variable %s released more than once"
+ % cname)
+ self.free_temp_entries.append(entry)
+
+ def temps_in_use(self):
+ # Return a new list of temp entries currently in use.
+ return [entry for entry in self.temp_entries
+ if entry not in self.free_temp_entries]
+
+ #def recycle_pending_temps(self):
+ # # Obsolete
+ # pass
+
    def use_utility_code(self, new_code):
        # Forward utility-code registration to the global scope,
        # which keeps a de-duplicated (by identity) list of the
        # utility code fragments to emit.
        self.global_scope().use_utility_code(new_code)
+
    def generate_library_function_declarations(self, code):
        # Generate extern decls for C library funcs used.
        # Currently a no-op: the only case (pow) is kept below as a
        # reference for when library declarations are needed again.
        #if self.pow_function_used:
        # code.putln("%s double pow(double, double);" % Naming.extern_c_macro)
        pass
+
+ def defines_any(self, names):
+ # Test whether any of the given names are
+ # defined in this scope.
+ for name in names:
+ if name in self.entries:
+ return 1
+ return 0
+
+
class BuiltinScope(Scope):
    # The builtin namespace.  Sits at the root of the scope chain;
    # ModuleScope uses it as its outer scope.

    def __init__(self):
        Scope.__init__(self, "__builtin__", None, None)

    def declare_builtin(self, name, pos):
        # A builtin's C name is simply its Python name.
        entry = self.declare(name, name, py_object_type, pos)
        entry.is_builtin = 1
        return entry
+
+
+class ModuleScope(Scope):
+ # module_name string Python name of the module
+ # module_cname string C name of Python module object
+ # #module_dict_cname string C name of module dict object
+ # method_table_cname string C name of method table
+ # doc string Module doc string
+ # doc_cname string C name of module doc string
+ # const_counter integer Counter for naming constants
+ # utility_code_used [string] Utility code to be included
+ # default_entries [Entry] Function argument default entries
+ # python_include_files [string] Standard Python headers to be included
+ # include_files [string] Other C headers to be included
+ # string_to_entry {string : Entry} Map string const to entry
+ # context Context
+ # parent_module Scope Parent in the import namespace
+ # module_entries {string : Entry} For cimport statements
+ # type_names {string : 1} Set of type names (used during parsing)
+ # pxd_file_loaded boolean Corresponding .pxd file has been processed
+ # cimported_modules [ModuleScope] Modules imported with cimport
+ # intern_map {string : string} Mapping from Python names to interned strs
+ # interned_names [string] Interned names pending generation of declarations
+ # all_pystring_entries [Entry] Python string consts from all scopes
+
+ def __init__(self, name, parent_module, context):
+ self.parent_module = parent_module
+ outer_scope = context.find_submodule("__builtin__")
+ Scope.__init__(self, name, outer_scope, parent_module)
+ self.module_name = name
+ self.context = context
+ self.module_cname = Naming.module_cname
+ self.module_dict_cname = Naming.moddict_cname
+ self.method_table_cname = Naming.methtable_cname
+ self.doc = ""
+ self.doc_cname = Naming.moddoc_cname
+ self.const_counter = 1
+ self.utility_code_used = []
+ self.default_entries = []
+ self.module_entries = {}
+ self.python_include_files = ["Python.h", "structmember.h"]
+ self.include_files = []
+ self.type_names = {}
+ self.pxd_file_loaded = 0
+ self.cimported_modules = []
+ self.intern_map = {}
+ self.interned_names = []
+ self.all_pystring_entries = []
+
+ def qualifying_scope(self):
+ return self.parent_module
+
+ def global_scope(self):
+ return self
+
+ def declare_builtin(self, name, pos):
+ entry = Scope.declare_builtin(self, name, pos)
+ entry.interned_cname = self.intern(name)
+ return entry
+
+ def intern(self, name):
+ intern_map = self.intern_map
+ cname = intern_map.get(name)
+ if not cname:
+ cname = Naming.interned_prefix + name
+ intern_map[name] = cname
+ self.interned_names.append(name)
+ return cname
+
+ def find_module(self, module_name, pos):
+ # Find a module in the import namespace, interpreting
+ # relative imports relative to this module's parent.
+ # Finds and parses the module's .pxd file if the module
+ # has not been referenced before.
+ return self.global_scope().context.find_module(
+ module_name, relative_to = self.parent_module, pos = pos)
+
+ def find_submodule(self, name):
+ # Find and return scope for a submodule of this module,
+ # creating a new empty one if necessary. Doesn't parse .pxd.
+ scope = self.lookup_submodule(name)
+ if not scope:
+ scope = ModuleScope(name,
+ parent_module = self, context = self.context)
+ self.module_entries[name] = scope
+ return scope
+
+ def lookup_submodule(self, name):
+ # Return scope for submodule of this module, or None.
+ return self.module_entries.get(name, None)
+
+ def add_include_file(self, filename):
+ if filename not in self.python_include_files \
+ and filename not in self.include_files:
+ self.include_files.append(filename)
+
+ def add_imported_module(self, scope):
+ if scope not in self.cimported_modules:
+ self.cimported_modules.append(scope)
+
+ def add_imported_entry(self, name, entry, pos):
+ if entry not in self.entries:
+ self.entries[name] = entry
+ else:
+ error(pos, "'%s' redeclared" % name)
+
+ def declare_module(self, name, scope, pos):
+ # Declare a cimported module. This is represented as a
+ # Python module-level variable entry with a module
+ # scope attached to it. Reports an error and returns
+ # None if previously declared as something else.
+ entry = self.lookup_here(name)
+ if entry:
+ if not (entry.is_pyglobal and not entry.as_module):
+ error(pos, "'%s' redeclared" % name)
+ return None
+ else:
+ entry = self.declare_var(name, py_object_type, pos)
+ entry.as_module = scope
+ self.cimported_modules.append(scope)
+ return entry
+
+ def declare_var(self, name, type, pos,
+ cname = None, visibility = 'private', is_cdef = 0):
+ # Add an entry for a global variable. If it is a Python
+ # object type, and not declared with cdef, it will live
+ # in the module dictionary, otherwise it will be a C
+ # global variable.
+ entry = Scope.declare_var(self, name, type, pos,
+ cname, visibility, is_cdef)
+ if not visibility in ('private', 'public', 'extern'):
+ error(pos, "Module-level variable cannot be declared %s" % visibility)
+ if not is_cdef:
+ if not (type.is_pyobject and not type.is_extension_type):
+ raise InternalError(
+ "Non-cdef global variable is not a generic Python object")
+ entry.is_pyglobal = 1
+ entry.namespace_cname = self.module_cname
+ if Options.intern_names:
+ entry.interned_cname = self.intern(name)
+ else:
+ entry.is_cglobal = 1
+ self.var_entries.append(entry)
+ return entry
+
+ def declare_global(self, name, pos):
+ entry = self.lookup_here(name)
+ if not entry:
+ self.declare_var(name, py_object_type, pos)
+
+ def add_default_value(self, type):
+ # Add an entry for holding a function argument
+ # default value.
+ cname = self.new_const_cname()
+ entry = Entry("", cname, type)
+ self.default_entries.append(entry)
+ return entry
+
+ def new_const_cname(self):
+ # Create a new globally-unique name for a constant.
+ n = self.const_counter
+ self.const_counter = n + 1
+ return "%s%d" % (Naming.const_prefix, n)
+
+ def use_utility_code(self, new_code):
+ # Add string to list of utility code to be included,
+ # if not already there (tested using 'is').
+ for old_code in self.utility_code_used:
+ if old_code is new_code:
+ return
+ self.utility_code_used.append(new_code)
+
+ def declare_c_class(self, name, pos, defining, implementing,
+ module_name, base_type, objstruct_cname, typeobj_cname,
+ visibility, typedef_flag):
+ #
+ #print "declare_c_class:", name
+ #print "...visibility =", visibility
+ #
+ # Look for previous declaration as a type
+ #
+ entry = self.lookup_here(name)
+ if entry:
+ type = entry.type
+ if not (entry.is_type and type.is_extension_type):
+ entry = None # Will cause an error when we redeclare it
+ else:
+ self.check_previous_typedef_flag(entry, typedef_flag, pos)
+ if base_type <> type.base_type:
+ error(pos, "Base type does not match previous declaration")
+ #
+ # Make a new entry if needed
+ #
+ if not entry:
+ type = PyExtensionType(name, typedef_flag, base_type)
+ if visibility == 'extern':
+ type.module_name = module_name
+ else:
+ type.module_name = self.qualified_name
+ type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
+ entry = self.declare_type(name, type, pos, visibility = visibility)
+ if objstruct_cname:
+ type.objstruct_cname = objstruct_cname
+ elif not entry.in_cinclude:
+ type.objstruct_cname = self.mangle(Naming.objstruct_prefix, name)
+ else:
+ error(entry.pos,
+ "Object name required for 'public' or 'extern' C class")
+ self.attach_var_entry_to_c_class(entry)
+ self.c_class_entries.append(entry)
+ #
+ # Check for re-definition and create scope if needed
+ #
+ if not type.scope:
+ if defining or implementing:
+ scope = CClassScope(name = name, outer_scope = self,
+ visibility = visibility)
+ if base_type:
+ scope.declare_inherited_c_attributes(base_type.scope)
+ type.set_scope(scope)
+ else:
+ self.check_for_illegal_incomplete_ctypedef(typedef_flag, pos)
+ else:
+ if defining and type.scope.defined:
+ error(pos, "C class '%s' already defined" % name)
+ elif implementing and type.scope.implemented:
+ error(pos, "C class '%s' already implemented" % name)
+ #
+ # Fill in options, checking for compatibility with any previous declaration
+ #
+ if implementing: # So that filenames in runtime exceptions refer to
+ entry.pos = pos # the .pyx file and not the .pxd file
+ if entry.visibility <> visibility:
+ error(pos, "Declaration of '%s' as '%s' conflicts with previous "
+ "declaration as '%s'" % (class_name, visibility, entry.visibility))
+ if objstruct_cname:
+ if type.objstruct_cname and type.objstruct_cname <> objstruct_cname:
+ error(pos, "Object struct name differs from previous declaration")
+ type.objstruct_cname = objstruct_cname
+ if typeobj_cname:
+ if type.typeobj_cname and type.typeobj_cname <> typeobj_cname:
+ error(pos, "Type object name differs from previous declaration")
+ type.typeobj_cname = typeobj_cname
+ #
+ # Return new or existing entry
+ #
+ return entry
+
+ def check_for_illegal_incomplete_ctypedef(self, typedef_flag, pos):
+ if typedef_flag and not self.in_cinclude:
+ error(pos, "Forward-referenced type must use 'cdef', not 'ctypedef'")
+
+ def allocate_vtable_names(self, entry):
+ # If extension type has a vtable, allocate vtable struct and
+ # slot names for it.
+ type = entry.type
+ if type.base_type and type.base_type.vtabslot_cname:
+ #print "...allocating vtabslot_cname because base type has one" ###
+ type.vtabslot_cname = "%s.%s" % (
+ Naming.obj_base_cname, type.base_type.vtabslot_cname)
+ elif type.scope and type.scope.cfunc_entries:
+ #print "...allocating vtabslot_cname because there are C methods" ###
+ type.vtabslot_cname = Naming.vtabslot_cname
+ if type.vtabslot_cname:
+ #print "...allocating other vtable related cnames" ###
+ type.vtabstruct_cname = self.mangle(Naming.vtabstruct_prefix, entry.name)
+ type.vtabptr_cname = self.mangle(Naming.vtabptr_prefix, entry.name)
+
+ def check_c_classes(self):
+ # Performs post-analysis checking and finishing up of extension types
+ # being implemented in this module. This is called only for the main
+ # .pyx file scope, not for cimported .pxd scopes.
+ #
+ # Checks all extension types declared in this scope to
+ # make sure that:
+ #
+ # * The extension type is implemented
+ # * All required object and type names have been specified or generated
+ # * All non-inherited C methods are implemented
+ #
+ # Also allocates a name for the vtable if needed.
+ #
+ debug_check_c_classes = 0
+ if debug_check_c_classes:
+ print "Scope.check_c_classes: checking scope", self.qualified_name
+ for entry in self.c_class_entries:
+ if debug_check_c_classes:
+ print "...entry", entry.name, entry
+ print "......type =", entry.type
+ print "......visibility =", entry.visibility
+ type = entry.type
+ name = entry.name
+ visibility = entry.visibility
+ # Check defined
+ if not type.scope:
+ error(entry.pos, "C class '%s' is declared but not defined" % name)
+ # Generate typeobj_cname
+ if visibility <> 'extern' and not type.typeobj_cname:
+ type.typeobj_cname = self.mangle(Naming.typeobj_prefix, name)
+ ## Generate typeptr_cname
+ #type.typeptr_cname = self.mangle(Naming.typeptr_prefix, name)
+ # Check C methods defined
+ if type.scope:
+ for method_entry in type.scope.cfunc_entries:
+ if not method_entry.is_inherited and not method_entry.func_cname:
+ error(method_entry.pos, "C method '%s' is declared but not defined" %
+ method_entry.name)
+ # Allocate vtable name if necessary
+ if type.vtabslot_cname:
+ #print "ModuleScope.check_c_classes: allocating vtable cname for", self ###
+ type.vtable_cname = self.mangle(Naming.vtable_prefix, entry.name)
+
+ def attach_var_entry_to_c_class(self, entry):
+ # The name of an extension class has to serve as both a type
+ # name and a variable name holding the type object. It is
+ # represented in the symbol table by a type entry with a
+ # variable entry attached to it. For the variable entry,
+ # we use a read-only C global variable whose name is an
+ # expression that refers to the type object.
+ var_entry = Entry(name = entry.name,
+ type = py_object_type,
+ pos = entry.pos,
+ cname = "((PyObject*)%s)" % entry.type.typeptr_cname)
+ var_entry.is_variable = 1
+ var_entry.is_cglobal = 1
+ var_entry.is_readonly = 1
+ entry.as_variable = var_entry
+
+
class LocalScope(Scope):
    # Scope of a function or method body.

    def __init__(self, name, outer_scope):
        Scope.__init__(self, name, outer_scope, outer_scope)

    def mangle(self, prefix, name):
        # Local names need no scope qualification.
        return prefix + name

    def declare_arg(self, name, type, pos):
        # Add an entry for an argument of a function.
        cname = self.mangle(Naming.var_prefix, name)
        entry = self.declare(name, cname, type, pos)
        entry.is_variable = 1
        if type.is_pyobject:
            entry.init = "0"
        #entry.borrowed = 1 # Not using borrowed arg refs for now
        self.arg_entries.append(entry)
        return entry

    def declare_var(self, name, type, pos,
            cname = None, visibility = 'private', is_cdef = 0):
        # Add an entry for a local variable.
        if visibility in ('public', 'readonly'):
            error(pos, "Local variable cannot be declared %s" % visibility)
        entry = Scope.declare_var(self, name, type, pos,
            cname, visibility, is_cdef)
        # Python object locals must be initialised to None.
        entry.init_to_none = type.is_pyobject
        self.var_entries.append(entry)
        return entry

    def declare_global(self, name, pos):
        # Pull entry from global scope into local scope.
        # Bug fix: the error message was missing the '% name'
        # argument and would have printed a literal '%s'.
        if self.lookup_here(name):
            error(pos, "'%s' redeclared" % name)
        else:
            entry = self.global_scope().lookup_target(name)
            self.entries[name] = entry
+
+
class StructOrUnionScope(Scope):
    # Namespace of a C struct or union.

    def __init__(self):
        Scope.__init__(self, "?", None, None)

    def declare_var(self, name, type, pos,
            cname = None, visibility = 'private', is_cdef = 0):
        # Add an entry for a struct or union member.
        entry = self.declare(name, cname or name, type, pos)
        entry.is_variable = 1
        self.var_entries.append(entry)
        # Members must be plain C data, with default visibility.
        if type.is_pyobject:
            error(pos,
                "C struct/union member cannot be a Python object")
        if visibility != 'private':
            error(pos,
                "C struct/union member cannot be declared %s" % visibility)
        return entry
+
+
class ClassScope(Scope):
    # Abstract base class for the namespace of a Python class
    # or extension type.
    #
    # class_name string Pyrex name of the class
    # scope_prefix string Additional prefix for names
    # declared in the class
    # doc string or None Doc string

    def __init__(self, name, outer_scope):
        Scope.__init__(self, name, outer_scope, outer_scope)
        self.doc = None
        self.class_name = name

    def add_string_const(self, value):
        # String constants are owned by the enclosing scope.
        return self.outer_scope.add_string_const(value)
+
+
class PyClassScope(ClassScope):
    # Namespace of a Python class.
    #
    # class_dict_cname string C variable holding class dict
    # class_obj_cname string C variable holding class object

    is_py_class_scope = 1

    def declare_var(self, name, type, pos,
            cname = None, visibility = 'private', is_cdef = 0):
        # Class attributes live in the class dict, so the entry is
        # a Python-level global of this class's namespace.
        entry = Scope.declare_var(self, name, type, pos,
            cname, visibility, is_cdef)
        entry.is_pyglobal = 1
        entry.namespace_cname = self.class_obj_cname
        if Options.intern_names:
            entry.interned_cname = self.intern(name)
        return entry

    def allocate_temp(self, type):
        # Temporaries are managed by the enclosing scope.
        return self.outer_scope.allocate_temp(type)

    def release_temp(self, cname):
        self.outer_scope.release_temp(cname)

    def add_default_value(self, type):
        # Default-value entries are managed by the enclosing scope.
        return self.outer_scope.add_default_value(type)
+
+
class CClassScope(ClassScope):
    # Namespace of an extension type.
    #
    # parent_type CClassType
    # #typeobj_cname string or None
    # #objstruct_cname string
    # method_table_cname string
    # member_table_cname string
    # getset_table_cname string
    # has_pyobject_attrs boolean Any PyObject attributes?
    # public_attr_entries boolean public/readonly attrs
    # property_entries [Entry]
    # defined boolean Defined in .pxd file
    # implemented boolean Defined in .pyx file
    # inherited_var_entries [Entry] Adapted var entries from base class

    is_c_class_scope = 1

    def __init__(self, name, outer_scope, visibility):
        ClassScope.__init__(self, name, outer_scope)
        # Extern types have no generated tables, so no names needed.
        if visibility <> 'extern':
            self.method_table_cname = outer_scope.mangle(Naming.methtab_prefix, name)
            self.member_table_cname = outer_scope.mangle(Naming.memtab_prefix, name)
            self.getset_table_cname = outer_scope.mangle(Naming.gstab_prefix, name)
        self.has_pyobject_attrs = 0
        self.public_attr_entries = []
        self.property_entries = []
        self.inherited_var_entries = []
        self.defined = 0
        self.implemented = 0

    def needs_gc(self):
        # If the type or any of its base types have Python-valued
        # C attributes, then it needs to participate in GC.
        return self.has_pyobject_attrs or \
            (self.parent_type.base_type and \
                self.parent_type.base_type.scope.needs_gc())

    def declare_var(self, name, type, pos,
            cname = None, visibility = 'private', is_cdef = 0):
        # Add an entry for a C attribute of the extension type.
        # The C struct layout is fixed once the definition part is
        # complete, so no attributes may be added afterwards.
        if self.defined:
            error(pos,
                "C attributes cannot be added in implementation part of"
                " extension type")
        if get_special_method_signature(name):
            error(pos,
                "The name '%s' is reserved for a special method."
                    % name)
        if not cname:
            cname = name
        entry = self.declare(name, cname, type, pos)
        entry.visibility = visibility
        entry.is_variable = 1
        self.var_entries.append(entry)
        if type.is_pyobject:
            # Python-valued attributes force GC participation.
            self.has_pyobject_attrs = 1
        if visibility not in ('private', 'public', 'readonly'):
            error(pos,
                "Attribute of extension type cannot be declared %s" % visibility)
        if visibility in ('public', 'readonly'):
            # Python-accessible attributes go in the member table,
            # which only supports certain C types.
            if type.pymemberdef_typecode:
                self.public_attr_entries.append(entry)
            else:
                error(pos,
                    "C attribute of type '%s' cannot be accessed from Python" % type)
        if visibility == 'public' and type.is_extension_type:
            error(pos,
                "Non-generic Python attribute cannot be exposed for writing from Python")
        return entry

    def declare_pyfunction(self, name, pos):
        # Add an entry for a method.
        entry = self.declare(name, name, py_object_type, pos)
        special_sig = get_special_method_signature(name)
        if special_sig:
            entry.signature = special_sig
            # Special methods don't get put in the method table
        else:
            entry.signature = pymethod_signature
            self.pyfunc_entries.append(entry)
        return entry

    def declare_cfunction(self, name, type, pos,
            cname = None, visibility = 'private', defining = 0):
        # Add an entry for a C method, checking consistency with
        # any previous declaration of the same name.
        if get_special_method_signature(name):
            error(pos, "Special methods must be declared with 'def', not 'cdef'")
        args = type.args
        if not args:
            error(pos, "C method has no self argument")
        elif not args[0].type.same_as(self.parent_type):
            error(pos, "Self argument of C method does not match parent type")
        entry = self.lookup_here(name)
        if entry:
            if not entry.is_cfunction:
                error(pos, "'%s' redeclared" % name)
            else:
                if defining and entry.func_cname:
                    error(pos, "'%s' already defined" % name)
                if not entry.type.same_as(type, as_cmethod = 1):
                    error(pos, "Signature does not match previous declaration")
        else:
            # New C methods may only be introduced in the definition
            # part, since they change the vtable layout.
            if self.defined:
                error(pos,
                    "C method '%s' not previously declared in definition part of"
                    " extension type" % name)
            entry = self.add_cfunction(name, type, pos, cname or name, visibility)
        if defining:
            entry.func_cname = self.mangle(Naming.func_prefix, name)
        return entry

    def add_cfunction(self, name, type, pos, cname, visibility):
        # Add a cfunction entry without giving it a func_cname.
        entry = ClassScope.add_cfunction(self, name, type, pos, cname, visibility)
        entry.is_cmethod = 1
        return entry

    def declare_property(self, name, doc, pos):
        # Add an entry for a property, with an attached PropertyScope
        # to hold its __get__/__set__/__del__ methods.
        entry = self.declare(name, name, py_object_type, pos)
        entry.is_property = 1
        entry.doc = doc
        entry.scope = PropertyScope(name,
            outer_scope = self.global_scope(), parent_scope = self)
        entry.scope.parent_type = self.parent_type
        self.property_entries.append(entry)
        return entry

    def declare_inherited_c_attributes(self, base_scope):
        # Declare entries for all the C attributes of an
        # inherited type, with cnames modified appropriately
        # to work with this type.
        def adapt(cname):
            # Inherited members are reached through the embedded
            # base-object struct member.
            return "%s.%s" % (Naming.obj_base_cname, base_entry.cname)
        for base_entry in \
            base_scope.inherited_var_entries + base_scope.var_entries:
                entry = self.declare(base_entry.name, adapt(base_entry.cname),
                    base_entry.type, None)
                entry.is_variable = 1
                self.inherited_var_entries.append(entry)
        for base_entry in base_scope.cfunc_entries:
            entry = self.add_cfunction(base_entry.name, base_entry.type, None,
                adapt(base_entry.cname), base_entry.visibility)
            entry.is_inherited = 1
+
+
class PropertyScope(Scope):
    # Scope holding the __get__, __set__ and __del__ methods for
    # a property of an extension type.
    #
    # parent_type PyExtensionType The type to which the property belongs

    def declare_pyfunction(self, name, pos):
        # Only the recognised property accessor methods may be
        # declared inside a property block.
        signature = get_property_accessor_signature(name)
        if not signature:
            error(pos, "Only __get__, __set__ and __del__ methods allowed "
                "in a property declaration")
            return None
        entry = self.declare(name, name, py_object_type, pos)
        entry.signature = signature
        return entry
--- /dev/null
+#
+# Pyrex - Tables describing slots in the type object
+# and associated know-how.
+#
+
+import Naming
+import PyrexTypes
+
class Signature:
    # Method slot signature descriptor.
    #
    # has_dummy_arg boolean
    # has_generic_args boolean
    # fixed_arg_format string
    # ret_format string
    # error_value string
    #
    # The formats are strings made up of the following
    # characters:
    #
    # 'O' Python object
    # 'T' Python object of the type of 'self'
    # 'v' void
    # 'p' void *
    # 'P' void **
    # 'i' int
    # 'I' int *
    # 'l' long
    # 's' char *
    # 'S' char **
    # 'r' int used only to signal exception
    # '-' dummy 'self' argument (not used)
    # '*' rest of args passed as generic Python
    # arg tuple and kw dict (must be last
    # char in format string)

    format_map = {
        'O': PyrexTypes.py_object_type,
        'v': PyrexTypes.c_void_type,
        'p': PyrexTypes.c_void_ptr_type,
        'P': PyrexTypes.c_void_ptr_ptr_type,
        'i': PyrexTypes.c_int_type,
        'I': PyrexTypes.c_int_ptr_type,
        'l': PyrexTypes.c_long_type,
        's': PyrexTypes.c_char_ptr_type,
        'S': PyrexTypes.c_char_ptr_ptr_type,
        'r': PyrexTypes.c_returncode_type,
        # 'T', '-' and '*' are handled otherwise
        # and are not looked up in here
    }

    error_value_map = {
        'O': "0",
        'i': "-1",
        'l': "-1",
        'r': "-1",
    }

    def __init__(self, arg_format, ret_format):
        # Strip the leading '-' (dummy self) and trailing '*'
        # (generic args) markers, recording their presence.
        self.has_dummy_arg = 0
        self.has_generic_args = 0
        if arg_format.startswith('-'):
            self.has_dummy_arg = 1
            arg_format = arg_format[1:]
        if arg_format.endswith('*'):
            self.has_generic_args = 1
            arg_format = arg_format[:-1]
        self.fixed_arg_format = arg_format
        self.ret_format = ret_format
        self.error_value = self.error_value_map.get(ret_format, None)

    def num_fixed_args(self):
        # Number of explicitly typed arguments.
        return len(self.fixed_arg_format)

    def is_self_arg(self, i):
        # Is fixed argument i the 'self' object?
        return self.fixed_arg_format[i] == 'T'

    def fixed_arg_type(self, i):
        # Pyrex type of fixed argument i.
        code = self.fixed_arg_format[i]
        return self.format_map[code]

    def return_type(self):
        # Pyrex type of the return value.
        return self.format_map[self.ret_format]
+
+
class SlotDescriptor:
    # Abstract base class for type slot descriptors.
    #
    # slot_name string Member name of the slot in the type object
    # is_initialised_dynamically Is initialised by code in the module init function

    def __init__(self, slot_name, dynamic = 0):
        self.slot_name = slot_name
        self.is_initialised_dynamically = dynamic

    def generate(self, scope, code):
        # Emit the static initialiser line for this slot.
        # Dynamically initialised slots get a 0 placeholder here
        # and are filled in by generate_dynamic_init_code.
        if not self.is_initialised_dynamically:
            value = self.slot_code(scope)
        else:
            value = 0
        code.putln("%s, /*%s*/" % (value, self.slot_name))

    def generate_dynamic_init_code(self, scope, code):
        # Some C implementations have trouble statically
        # initialising a global with a pointer to an extern
        # function, so some type slots are instead assigned in
        # the module init function.
        if self.is_initialised_dynamically:
            value = self.slot_code(scope)
            if value != "0":
                code.putln("%s.%s = %s;" % (
                    scope.parent_type.typeobj_cname,
                    self.slot_name,
                    value))
+
+
class FixedSlot(SlotDescriptor):
    # Descriptor for a type slot with a fixed value.
    #
    # value string

    def __init__(self, slot_name, value):
        SlotDescriptor.__init__(self, slot_name)
        self.value = value

    def slot_code(self, scope):
        # The value is independent of the scope.
        return self.value
+
+
class EmptySlot(FixedSlot):
    # Descriptor for a type slot whose value is always 0.

    def __init__(self, slot_name):
        FixedSlot.__init__(self, slot_name, "0")
+
+
class GCDependentSlot(SlotDescriptor):
    # Descriptor for a slot whose value depends on whether
    # the type participates in GC.

    def __init__(self, slot_name, no_gc_value, gc_value, dynamic = 0):
        SlotDescriptor.__init__(self, slot_name, dynamic)
        self.gc_value = gc_value
        self.no_gc_value = no_gc_value

    def slot_code(self, scope):
        # GC participation is signalled by Python-valued attributes.
        if not scope.has_pyobject_attrs:
            return self.no_gc_value
        return self.gc_value
+
+
class MethodSlot(SlotDescriptor):
    # Type slot descriptor for a user-definable method.
    #
    # signature Signature
    # method_name string The __xxx__ name of the method
    # default string or None Default value of the slot

    def __init__(self, signature, slot_name, method_name, default = None):
        SlotDescriptor.__init__(self, slot_name)
        self.signature = signature
        self.slot_name = slot_name
        self.method_name = method_name
        self.default = default
        # Register so the method name can be recognised as special.
        method_name_to_slot[method_name] = self

    def slot_code(self, scope):
        # Point the slot at the user's implementation if the method
        # is defined in this scope; otherwise leave it empty.
        entry = scope.lookup_here(self.method_name)
        if not entry:
            return "0"
        return entry.func_cname
+
+
class InternalMethodSlot(SlotDescriptor):
    # Type slot descriptor for a method which is always
    # synthesized by Pyrex.
    #
    # slot_name string Member name of the slot in the type object

    def __init__(self, slot_name):
        SlotDescriptor.__init__(self, slot_name)

    def slot_code(self, scope):
        # The synthesized function's name is derived from the slot
        # name by the scope's internal-name mangling.
        return scope.mangle_internal(self.slot_name)
+
+
class SyntheticSlot(InternalMethodSlot):
    # Type slot descriptor for a synthesized method which
    # dispatches to one or more user-defined methods depending
    # on its arguments. If none of the relevant methods are
    # defined, the method will not be synthesized and an
    # alternative default value will be placed in the type
    # slot.

    def __init__(self, slot_name, user_methods, default_value):
        InternalMethodSlot.__init__(self, slot_name)
        self.default_value = default_value
        self.user_methods = user_methods

    def slot_code(self, scope):
        # No user method defined -> no synthesized dispatcher.
        if not scope.defines_any(self.user_methods):
            return self.default_value
        return InternalMethodSlot.slot_code(self, scope)
+
+
class TypeFlagsSlot(SlotDescriptor):
    # Descriptor for the type flags slot.

    def slot_code(self, scope):
        # Py_TPFLAGS_HAVE_GC is always included because
        # PyType_Ready doesn't seem to inherit it.
        flags = ["Py_TPFLAGS_DEFAULT", "Py_TPFLAGS_CHECKTYPES",
                 "Py_TPFLAGS_BASETYPE", "Py_TPFLAGS_HAVE_GC"]
        return "|".join(flags)
+
+
class DocStringSlot(SlotDescriptor):
    # Descriptor for the docstring slot.

    def slot_code(self, scope):
        # An absent doc string becomes a NULL pointer.
        doc = scope.doc
        if doc is None:
            return "0"
        return '"%s"' % doc
+
+
class SuiteSlot(SlotDescriptor):
    # Descriptor for a substructure of the type object.
    #
    # sub_slots [SlotDescriptor]

    def __init__(self, sub_slots, slot_type, slot_name):
        SlotDescriptor.__init__(self, slot_name)
        self.slot_type = slot_type
        self.sub_slots = sub_slots
        # Remember every suite so their definitions can be emitted.
        substructures.append(self)

    def substructure_cname(self, scope):
        return "%s%s_%s" % (Naming.pyrex_prefix, self.slot_name, scope.class_name)

    def slot_code(self, scope):
        # The slot holds a pointer to the substructure.
        return "&" + self.substructure_cname(scope)

    def generate_substructure(self, scope, code):
        # Emit the static definition of the substructure itself.
        code.putln("")
        code.putln(
            "static %s %s = {" % (
                self.slot_type,
                self.substructure_cname(scope)))
        for sub_slot in self.sub_slots:
            sub_slot.generate(scope, code)
        code.putln("};")
+
substructures = [] # List of all SuiteSlot instances (appended to by SuiteSlot.__init__)
+
class MethodTableSlot(SlotDescriptor):
    # Slot descriptor for the method table.

    def slot_code(self, scope):
        # The scope allocated this name when it was created.
        return scope.method_table_cname
+
+
class MemberTableSlot(SlotDescriptor):
    # Slot descriptor for the table of Python-accessible attributes.

    def slot_code(self, scope):
        # No public/readonly attributes -> no member table.
        if not scope.public_attr_entries:
            return "0"
        return scope.member_table_cname
+
+
class GetSetSlot(SlotDescriptor):
    # Slot descriptor for the table of attribute get & set methods.

    def slot_code(self, scope):
        # No properties -> no getset table.
        if not scope.property_entries:
            return "0"
        return scope.getset_table_cname
+
+
class BaseClassSlot(SlotDescriptor):
    # Slot descriptor for the base class slot.

    def __init__(self, name):
        # The base type object is not known statically, so this
        # slot is always filled in at module init time.
        SlotDescriptor.__init__(self, name, dynamic = 1)

    def generate_dynamic_init_code(self, scope, code):
        base_type = scope.parent_type.base_type
        if not base_type:
            return
        code.putln("%s.%s = %s;" % (
            scope.parent_type.typeobj_cname,
            self.slot_name,
            base_type.typeptr_cname))
+
+
# The following dictionary maps __xxx__ method names to slot descriptors.
# It starts empty here; presumably it is populated as a side effect of
# creating MethodSlot instances (MethodSlot is defined elsewhere) --
# confirm against the MethodSlot constructor.

method_name_to_slot = {}
+
+## The following slots are (or could be) initialised with an
+## extern function pointer.
+#
+#slots_initialised_from_extern = (
+# "tp_free",
+#)
+
+#------------------------------------------------------------------------------------------
+#
+# Utility functions for accessing slot table data structures
+#
+#------------------------------------------------------------------------------------------
+
def get_special_method_signature(name):
    # Look up the slot descriptor registered for a special method name
    # and return its signature; returns None when the name is not a
    # registered special method.
    slot = method_name_to_slot.get(name)
    if not slot:
        return None
    return slot.signature
+
def get_property_accessor_signature(name):
    # Return the signature of an extension type property accessor
    # (__get__/__set__/__del__), or None for any other name.
    try:
        return property_accessor_signatures[name]
    except KeyError:
        return None
+
+#------------------------------------------------------------------------------------------
+#
+# Signatures for generic Python functions and methods.
+#
+#------------------------------------------------------------------------------------------
+
# Generic signatures, in Signature's format-code string form (Signature
# is defined elsewhere). Judging from the typedef comments below, "T"
# denotes the self argument, "O" a generic object, and "*" a variable
# argument list.
pyfunction_signature = Signature("-*", "O")
pymethod_signature = Signature("T*", "O")
+
+#------------------------------------------------------------------------------------------
+#
+# Signatures for the various kinds of function that
+# can appear in the type object and its substructures.
+#
+#------------------------------------------------------------------------------------------
+
# Format codes, judging by the typedefs alongside each entry: "T" = self
# object, "O" = generic object, "i"/"l" = C int/long return, "r" = int
# error-return, "v" = void, "*" = variable argument list.

unaryfunc = Signature("T", "O")  # typedef PyObject * (*unaryfunc)(PyObject *);
binaryfunc = Signature("OO", "O")  # typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
ibinaryfunc = Signature("TO", "O")  # binaryfunc, with the first operand as self
ternaryfunc = Signature("OOO", "O")  # typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
iternaryfunc = Signature("TOO", "O")  # ternaryfunc, with the first operand as self
callfunc = Signature("T*", "O")  # tp_call: self plus variable argument list
inquiry = Signature("T", "i")  # typedef int (*inquiry)(PyObject *);
    # typedef int (*coercion)(PyObject **, PyObject **);
intargfunc = Signature("Ti", "O")  # typedef PyObject *(*intargfunc)(PyObject *, int);
intintargfunc = Signature("Tii", "O")  # typedef PyObject *(*intintargfunc)(PyObject *, int, int);
intobjargproc = Signature("TiO", 'r')  # typedef int(*intobjargproc)(PyObject *, int, PyObject *);
intintobjargproc = Signature("TiiO", 'r')  # typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *);
intintargproc = Signature("Tii", 'r')
objargfunc = Signature("TO", "O")
objobjargproc = Signature("TOO", 'r')  # typedef int (*objobjargproc)(PyObject *, PyObject *, PyObject *);
getreadbufferproc = Signature("TiP", 'i')  # typedef int (*getreadbufferproc)(PyObject *, int, void **);
getwritebufferproc = Signature("TiP", 'i')  # typedef int (*getwritebufferproc)(PyObject *, int, void **);
getsegcountproc = Signature("TI", 'i')  # typedef int (*getsegcountproc)(PyObject *, int *);
getcharbufferproc = Signature("TiS", 'i')  # typedef int (*getcharbufferproc)(PyObject *, int, const char **);
objargproc = Signature("TO", 'r')  # typedef int (*objobjproc)(PyObject *, PyObject *);
    # typedef int (*visitproc)(PyObject *, void *);
    # typedef int (*traverseproc)(PyObject *, visitproc, void *);

destructor = Signature("T", "v")  # typedef void (*destructor)(PyObject *);
# printfunc = Signature("TFi", 'r')  # typedef int (*printfunc)(PyObject *, FILE *, int);
    # typedef PyObject *(*getattrfunc)(PyObject *, char *);
getattrofunc = Signature("TO", "O")  # typedef PyObject *(*getattrofunc)(PyObject *, PyObject *);
    # typedef int (*setattrfunc)(PyObject *, char *, PyObject *);
setattrofunc = Signature("TOO", 'r')  # typedef int (*setattrofunc)(PyObject *, PyObject *, PyObject *);
delattrofunc = Signature("TO", 'r')
cmpfunc = Signature("TO", "i")  # typedef int (*cmpfunc)(PyObject *, PyObject *);
reprfunc = Signature("T", "O")  # typedef PyObject *(*reprfunc)(PyObject *);
hashfunc = Signature("T", "l")  # typedef long (*hashfunc)(PyObject *);
richcmpfunc = Signature("OOi", "O")  # typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
getiterfunc = Signature("T", "O")  # typedef PyObject *(*getiterfunc) (PyObject *);
iternextfunc = Signature("T", "O")  # typedef PyObject *(*iternextfunc) (PyObject *);
descrgetfunc = Signature("TOO", "O")  # typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *);
descrsetfunc = Signature("TOO", 'r')  # typedef int (*descrsetfunc) (PyObject *, PyObject *, PyObject *);
descrdelfunc = Signature("TO", 'r')
initproc = Signature("T*", 'r')  # typedef int (*initproc)(PyObject *, PyObject *, PyObject *);
    # typedef PyObject *(*newfunc)(struct _typeobject *, PyObject *, PyObject *);
    # typedef PyObject *(*allocfunc)(struct _typeobject *, int);
+
+#------------------------------------------------------------------------------------------
+#
+# Signatures for accessor methods of properties.
+#
+#------------------------------------------------------------------------------------------
+
+property_accessor_signatures = {
+ '__get__': Signature("T", "O"),
+ '__set__': Signature("TO", 'r'),
+ '__del__': Signature("T", 'r')
+}
+
+#------------------------------------------------------------------------------------------
+#
+# Descriptor tables for the slots of the various type object
+# substructures, in the order they appear in the structure.
+#
+#------------------------------------------------------------------------------------------
+
# Entries must stay in PyNumberMethods struct order (see section header
# above); EmptySlot holds the position of slots Pyrex never fills.
PyNumberMethods = (
    MethodSlot(binaryfunc, "nb_add", "__add__"),
    MethodSlot(binaryfunc, "nb_subtract", "__sub__"),
    MethodSlot(binaryfunc, "nb_multiply", "__mul__"),
    MethodSlot(binaryfunc, "nb_divide", "__div__"),
    MethodSlot(binaryfunc, "nb_remainder", "__mod__"),
    MethodSlot(binaryfunc, "nb_divmod", "__divmod__"),
    MethodSlot(ternaryfunc, "nb_power", "__pow__"),
    MethodSlot(unaryfunc, "nb_negative", "__neg__"),
    MethodSlot(unaryfunc, "nb_positive", "__pos__"),
    MethodSlot(unaryfunc, "nb_absolute", "__abs__"),
    MethodSlot(inquiry, "nb_nonzero", "__nonzero__"),
    MethodSlot(unaryfunc, "nb_invert", "__invert__"),
    MethodSlot(binaryfunc, "nb_lshift", "__lshift__"),
    MethodSlot(binaryfunc, "nb_rshift", "__rshift__"),
    MethodSlot(binaryfunc, "nb_and", "__and__"),
    MethodSlot(binaryfunc, "nb_xor", "__xor__"),
    MethodSlot(binaryfunc, "nb_or", "__or__"),
    EmptySlot("nb_coerce"),
    MethodSlot(unaryfunc, "nb_int", "__int__"),
    MethodSlot(unaryfunc, "nb_long", "__long__"),
    MethodSlot(unaryfunc, "nb_float", "__float__"),
    MethodSlot(unaryfunc, "nb_oct", "__oct__"),
    MethodSlot(unaryfunc, "nb_hex", "__hex__"),

    # Added in release 2.0
    MethodSlot(ibinaryfunc, "nb_inplace_add", "__iadd__"),
    MethodSlot(ibinaryfunc, "nb_inplace_subtract", "__isub__"),
    MethodSlot(ibinaryfunc, "nb_inplace_multiply", "__imul__"),
    MethodSlot(ibinaryfunc, "nb_inplace_divide", "__idiv__"),
    MethodSlot(ibinaryfunc, "nb_inplace_remainder", "__imod__"),
    MethodSlot(ternaryfunc, "nb_inplace_power", "__ipow__"), # NOT iternaryfunc!!!
    MethodSlot(ibinaryfunc, "nb_inplace_lshift", "__ilshift__"),
    MethodSlot(ibinaryfunc, "nb_inplace_rshift", "__irshift__"),
    MethodSlot(ibinaryfunc, "nb_inplace_and", "__iand__"),
    MethodSlot(ibinaryfunc, "nb_inplace_xor", "__ixor__"),
    MethodSlot(ibinaryfunc, "nb_inplace_or", "__ior__"),

    # Added in release 2.2
    # The following require the Py_TPFLAGS_HAVE_CLASS flag
    MethodSlot(binaryfunc, "nb_floor_divide", "__floordiv__"),
    MethodSlot(binaryfunc, "nb_true_divide", "__truediv__"),
    MethodSlot(ibinaryfunc, "nb_inplace_floor_divide", "__ifloordiv__"),
    MethodSlot(ibinaryfunc, "nb_inplace_true_divide", "__itruediv__"),
)
+
# PySequenceMethods struct, in declaration order. Several slots are left
# empty because the equivalent number/mapping slot is used instead.
PySequenceMethods = (
    MethodSlot(inquiry, "sq_length", "__len__"), # EmptySlot("sq_length"), # mp_length used instead
    EmptySlot("sq_concat"), # nb_add used instead
    EmptySlot("sq_repeat"), # nb_multiply used instead
    SyntheticSlot("sq_item", ["__getitem__"], "0"), #EmptySlot("sq_item"), # mp_subscript used instead
    MethodSlot(intintargfunc, "sq_slice", "__getslice__"),
    EmptySlot("sq_ass_item"), # mp_ass_subscript used instead
    SyntheticSlot("sq_ass_slice", ["__setslice__", "__delslice__"], "0"),
    MethodSlot(cmpfunc, "sq_contains", "__contains__"),
    EmptySlot("sq_inplace_concat"), # nb_inplace_add used instead
    EmptySlot("sq_inplace_repeat"), # nb_inplace_multiply used instead
)
+
# PyMappingMethods struct, in declaration order.
PyMappingMethods = (
    MethodSlot(inquiry, "mp_length", "__len__"),
    MethodSlot(objargfunc, "mp_subscript", "__getitem__"),
    SyntheticSlot("mp_ass_subscript", ["__setitem__", "__delitem__"], "0"),
)
+
# PyBufferProcs struct (old-style buffer interface), in declaration order.
PyBufferProcs = (
    MethodSlot(getreadbufferproc, "bf_getreadbuffer", "__getreadbuffer__"),
    MethodSlot(getwritebufferproc, "bf_getwritebuffer", "__getwritebuffer__"),
    MethodSlot(getsegcountproc, "bf_getsegcount", "__getsegcount__"),
    MethodSlot(getcharbufferproc, "bf_getcharbuffer", "__getcharbuffer__"),
)
+
+#------------------------------------------------------------------------------------------
+#
+# The main slot table. This table contains descriptors for all the
+# top-level type slots, beginning with tp_dealloc, in the order they
+# appear in the type object.
+#
+#------------------------------------------------------------------------------------------
+
# Entries must stay in PyTypeObject declaration order, starting at
# tp_dealloc (see section header above).
slot_table = (
    InternalMethodSlot("tp_dealloc"),
    EmptySlot("tp_print"), #MethodSlot(printfunc, "tp_print", "__print__"),
    EmptySlot("tp_getattr"),
    EmptySlot("tp_setattr"),
    MethodSlot(cmpfunc, "tp_compare", "__cmp__"),
    MethodSlot(reprfunc, "tp_repr", "__repr__"),

    SuiteSlot(PyNumberMethods, "PyNumberMethods", "tp_as_number"),
    SuiteSlot(PySequenceMethods, "PySequenceMethods", "tp_as_sequence"),
    SuiteSlot(PyMappingMethods, "PyMappingMethods", "tp_as_mapping"),

    MethodSlot(hashfunc, "tp_hash", "__hash__"),
    MethodSlot(callfunc, "tp_call", "__call__"),
    MethodSlot(reprfunc, "tp_str", "__str__"),

    SyntheticSlot("tp_getattro", ["__getattr__"], "0"), #"PyObject_GenericGetAttr"),
    SyntheticSlot("tp_setattro", ["__setattr__", "__delattr__"], "0"), #"PyObject_GenericSetAttr"),

    SuiteSlot(PyBufferProcs, "PyBufferProcs", "tp_as_buffer"),

    TypeFlagsSlot("tp_flags"),
    DocStringSlot("tp_doc"),

    InternalMethodSlot("tp_traverse"),
    InternalMethodSlot("tp_clear"),

    # Later -- synthesize a method to split into separate ops?
    MethodSlot(richcmpfunc, "tp_richcompare", "__richcmp__"),

    EmptySlot("tp_weaklistoffset"),

    MethodSlot(getiterfunc, "tp_iter", "__iter__"),
    MethodSlot(iternextfunc, "tp_iternext", "__next__"),

    MethodTableSlot("tp_methods"),
    MemberTableSlot("tp_members"),
    GetSetSlot("tp_getset"),

    BaseClassSlot("tp_base"), #EmptySlot("tp_base"),
    EmptySlot("tp_dict"),

    SyntheticSlot("tp_descr_get", ["__get__"], "0"),
    SyntheticSlot("tp_descr_set", ["__set__", "__delete__"], "0"),

    EmptySlot("tp_dictoffset"),

    MethodSlot(initproc, "tp_init", "__init__"),
    EmptySlot("tp_alloc"), #FixedSlot("tp_alloc", "PyType_GenericAlloc"),
    InternalMethodSlot("tp_new"),
    # Some versions of Python 2.2 inherit the wrong value for tp_free when the
    # type has GC but the base type doesn't, so we explicitly set it ourselves
    # in that case.
    GCDependentSlot("tp_free", "0", "_PyObject_GC_Del", dynamic = 1),

    EmptySlot("tp_is_gc"),
    EmptySlot("tp_bases"),
    EmptySlot("tp_mro"),
    EmptySlot("tp_cache"),
    EmptySlot("tp_subclasses"),
    EmptySlot("tp_weaklist"),
)
+
+#------------------------------------------------------------------------------------------
+#
+# Descriptors for special methods which don't appear directly
+# in the type object or its substructures. These methods are
+# called from slot functions synthesized by Pyrex.
+#
+#------------------------------------------------------------------------------------------
+
+MethodSlot(initproc, "", "__new__")
+MethodSlot(destructor, "", "__dealloc__")
+MethodSlot(objobjargproc, "", "__setitem__")
+MethodSlot(objargproc, "", "__delitem__")
+MethodSlot(intintobjargproc, "", "__setslice__")
+MethodSlot(intintargproc, "", "__delslice__")
+MethodSlot(getattrofunc, "", "__getattr__")
+MethodSlot(setattrofunc, "", "__setattr__")
+MethodSlot(delattrofunc, "", "__delattr__")
+MethodSlot(descrgetfunc, "", "__get__")
+MethodSlot(descrsetfunc, "", "__set__")
+MethodSlot(descrdelfunc, "", "__delete__")
--- /dev/null
# Pyrex release version string.
version = '0.9.4.1'
--- /dev/null
+###############################################
+#
+# Odds and ends for debugging
+#
+###############################################
+
+def print_call_chain(*args):
+ import sys
+ print " ".join(map(str, args))
+ f = sys._getframe(2)
+ while f:
+ name = f.f_code.co_name
+ s = f.f_locals.get('self', None)
+ if s:
+ c = getattr(s, "__class__", None)
+ if c:
+ name = "%s.%s" % (c.__name__, name)
+ print "Called from:", name, f.f_lineno
+ f = f.f_back
+ print "-" * 70
--- /dev/null
# July 2002, Graham Fawcett
#
# This hack was inspired by the way Thomas Heller got py2exe
# to appear as a distutils command.
#
# We replace distutils.command.build_ext with our own version
# and keep the old one under the module name _build_ext,
# so that *our* build_ext can make use of it.
+
+from build_ext import build_ext
+
+
+
--- /dev/null
+# Subclasses disutils.command.build_ext,
+# replacing it with a Pyrex version that compiles pyx->c
+# before calling the original build_ext command.
+# July 2002, Graham Fawcett
+# Modified by Darrell Gallion <dgallion1@yahoo.com>
+# to allow inclusion of .c files along with .pyx files.
+# Pyrex is (c) Greg Ewing.
+
+import distutils.command.build_ext
+#import Pyrex.Compiler.Main
+from Pyrex.Compiler.Main import CompilationOptions, default_options, compile
+from Pyrex.Compiler.Errors import PyrexError
+from distutils.dep_util import newer
+import os
+import sys
+
def replace_suffix(path, new_suffix):
    # Return path with its extension replaced by new_suffix (which
    # should include the leading dot).
    root = os.path.splitext(path)[0]
    return root + new_suffix
+
class build_ext (distutils.command.build_ext.build_ext):
    # distutils build_ext command extended to translate .pyx sources to
    # C with Pyrex before handing off to the standard C build.

    description = "compile Pyrex scripts, then build C/C++ extensions (compile/link to build directory)"

    def finalize_options (self):
        distutils.command.build_ext.build_ext.finalize_options(self)

        # The following hack should no longer be needed.
        if 0:
            # compiling with mingw32 gets an "initializer not a constant" error
            # doesn't appear to happen with MSVC!
            # so if we are compiling with mingw32,
            # switch to C++ mode, to avoid the problem
            if self.compiler == 'mingw32':
                self.swig_cpp = 1

    def swig_sources (self, sources, extension = None):
        # Translate each out-of-date .pyx file in 'sources' to C and
        # return the source list with .pyx entries replaced by their
        # generated .c files. 'extension' is accepted for distutils
        # interface compatibility but unused.
        if not self.extensions:
            return

        # collect the names of the source (.pyx) files
        # (removed a dead "pyx_sources = []" that was immediately overwritten)
        pyx_sources = [source for source in sources if source.endswith('.pyx')]
        other_sources = [source for source in sources if not source.endswith('.pyx')]

        #suffix = self.swig_cpp and '.cpp' or '.c'
        suffix = '.c'
        for pyx in pyx_sources:
            # should I raise an exception if it doesn't exist?
            if os.path.exists(pyx):
                source = pyx
                target = replace_suffix(source, suffix)
                if newer(source, target) or self.force:
                    self.pyrex_compile(source)

        return [replace_suffix(src, suffix) for src in pyx_sources] + other_sources

    def pyrex_compile(self, source):
        # Run the Pyrex compiler on one .pyx file; abort the whole
        # build on any compilation error.
        options = CompilationOptions(default_options,
            include_path = self.include_dirs)
        result = compile(source, options)
        if result.num_errors != 0:  # was '<>': deprecated comparison operator
            sys.exit(1)
+
--- /dev/null
+#
+# Pyrex - Darwin system interface
+#
+
# Module-wide switches.
verbose = 0  # when true, echo compiler/linker command lines
gcc_pendantic = True  # NOTE(review): typo for "pedantic"; name kept as-is in case it is set externally
gcc_warnings_are_errors = False

import os
from Pyrex.Utils import replace_suffix
from Pyrex.Compiler.Errors import PyrexError

# Where to find the Python headers (MacOSX framework build).
py_include_dirs = [
    "/Library/Frameworks/Python.framework/Headers"
]

# Index 0 is the C tool, index 1 the C++ tool; selected via bool(cplus)
# in c_compile/c_link_list below.
compilers = ["gcc", "g++"]
compiler_options = \
    "-g -c -fno-strict-aliasing -Wno-long-double -no-cpp-precomp " \
    "-mno-fused-madd -fno-common -dynamic " \
    .split()
if gcc_pendantic:
    compiler_options.extend(["-pedantic", "-Wno-long-long"])
if gcc_warnings_are_errors:
    compiler_options.append("-Werror")

linkers = ["gcc", "g++"]
linker_options = \
    "-Wl,-F.,-w -bundle -framework Python" \
    .split()
+
class CCompilerError(PyrexError):
    # Raised when the external C compiler or linker exits with a
    # non-zero status (see c_compile / c_link_list below).
    pass
+
def c_compile(c_file, verbose_flag = 0, cplus = 0, obj_suffix = ".o"):
    # Compile the given C source file to produce an object file.
    # Returns the pathname of the resulting file.
    #
    # cplus selects the C++ compiler; obj_suffix names the output
    # file's extension. Raises CCompilerError if the compiler exits
    # with a non-zero status.
    c_file = os.path.join(os.getcwd(), c_file)
    o_file = replace_suffix(c_file, obj_suffix)
    # Renamed loop variable: 'dir' shadowed the builtin.
    include_options = ["-I%s" % include_dir for include_dir in py_include_dirs]
    compiler = compilers[bool(cplus)]
    args = [compiler] + compiler_options + include_options + [c_file, "-o", o_file]
    if verbose_flag or verbose:
        print(" ".join(args))
    status = os.spawnvp(os.P_WAIT, compiler, args)
    if status != 0:  # was '<>': deprecated comparison operator
        raise CCompilerError("C compiler returned status %s" % status)
    return o_file
+
def c_link(obj_file, verbose_flag = 0, extra_objects = None, cplus = 0):
    # Link one object file (plus any extra objects) into a dynamically
    # loadable extension; see c_link_list for details.
    # extra_objects now defaults to None instead of a shared mutable
    # list (the classic mutable-default pitfall); behavior is unchanged.
    if extra_objects is None:
        extra_objects = []
    return c_link_list([obj_file] + extra_objects, verbose_flag, cplus)
+
def c_link_list(obj_files, verbose_flag = 0, cplus = 0):
    # Link the given object files into a dynamically loadable extension
    # file. Returns the pathname of the resulting file (the first
    # object's name with a .so suffix). Raises CCompilerError if the
    # linker exits with a non-zero status.
    out_file = replace_suffix(obj_files[0], ".so")
    linker = linkers[bool(cplus)]
    args = [linker] + linker_options + obj_files + ["-o", out_file]
    if verbose_flag or verbose:
        print(" ".join(args))
    status = os.spawnvp(os.P_WAIT, linker, args)
    if status != 0:  # was '<>': deprecated comparison operator
        raise CCompilerError("Linker returned status %s" % status)
    return out_file
--- /dev/null
+"""Suite Standard Suite: Common terms for most applications
+Level 1, version 1
+
+Generated from Macintosh HD:System 8.0:Finder
+AETE/AEUT resource version 0/144, language 0, script 0
+"""
+
+import aetools
+import MacOS
+
_code = 'core'  # AppleEvent suite code for this (the Standard) suite
+
+class Finder_Std_Suite:
+
+ _argmap_class_info = {
+ '_in' : 'wrcd',
+ }
+
+ def class_info(self, _object=None, _attributes={}, **_arguments):
+ """class info: Get information about an object class
+ Required argument: the object class about which information is requested
+ Keyword argument _in: the human language and script system in which to return information
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: a record containing the object's properties and elements
+ """
+ _code = 'core'
+ _subcode = 'qobj'
+
+ aetools.keysubst(_arguments, self._argmap_class_info)
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_close = {
+ 'saving' : 'savo',
+ 'saving_in' : 'kfil',
+ }
+
+ def close(self, _object, _attributes={}, **_arguments):
+ """close: Close an object
+ Required argument: the object to close
+ Keyword argument saving: specifies whether changes should be saved before closing
+ Keyword argument saving_in: the file in which to save the object
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ """
+ _code = 'core'
+ _subcode = 'clos'
+
+ aetools.keysubst(_arguments, self._argmap_close)
+ _arguments['----'] = _object
+
+ aetools.enumsubst(_arguments, 'savo', _Enum_savo)
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_count = {
+ 'each' : 'kocl',
+ }
+
+ def count(self, _object, _attributes={}, **_arguments):
+ """count: Return the number of elements of a particular class within an object
+ Required argument: the object whose elements are to be counted
+ Keyword argument each: the class of the elements to be counted
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: the number of elements
+ """
+ _code = 'core'
+ _subcode = 'cnte'
+
+ aetools.keysubst(_arguments, self._argmap_count)
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_data_size = {
+ 'as' : 'rtyp',
+ }
+
+ def data_size(self, _object, _attributes={}, **_arguments):
+ """data size: Return the size in bytes of an object
+ Required argument: the object whose data size is to be returned
+ Keyword argument as: the data type for which the size is calculated
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: the size of the object in bytes
+ """
+ _code = 'core'
+ _subcode = 'dsiz'
+
+ aetools.keysubst(_arguments, self._argmap_data_size)
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ def delete(self, _object, _attributes={}, **_arguments):
+ """delete: Delete an element from an object
+ Required argument: the element to delete
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ """
+ _code = 'core'
+ _subcode = 'delo'
+
+ if _arguments: raise TypeError, 'No optional args expected'
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_duplicate = {
+ 'to' : 'insh',
+ 'replacing' : 'alrp',
+ 'routing_suppressed' : 'rout',
+ }
+
+ def duplicate(self, _object, _attributes={}, **_arguments):
+ """duplicate: Duplicate object(s)
+ Required argument: the object(s) to duplicate
+ Keyword argument to: the new location for the object(s)
+ Keyword argument replacing: Specifies whether or not to replace items in the destination that have the same name as items being duplicated
+ Keyword argument routing_suppressed: Specifies whether or not to autoroute items (default is false). Only applies when copying to the system folder.
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: to the duplicated object(s)
+ """
+ _code = 'core'
+ _subcode = 'clon'
+
+ aetools.keysubst(_arguments, self._argmap_duplicate)
+ _arguments['----'] = _object
+
+ aetools.enumsubst(_arguments, 'alrp', _Enum_bool)
+ aetools.enumsubst(_arguments, 'rout', _Enum_bool)
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_event_info = {
+ '_in' : 'wrcd',
+ }
+
+ def event_info(self, _object, _attributes={}, **_arguments):
+ """event info: Get information about the Apple events in a suite
+ Required argument: the event class of the Apple events for which to return information
+ Keyword argument _in: the human language and script system in which to return information
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: a record containing the events and their parameters
+ """
+ _code = 'core'
+ _subcode = 'gtei'
+
+ aetools.keysubst(_arguments, self._argmap_event_info)
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ def exists(self, _object, _attributes={}, **_arguments):
+ """exists: Verify if an object exists
+ Required argument: the object in question
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: true if it exists, false if not
+ """
+ _code = 'core'
+ _subcode = 'doex'
+
+ if _arguments: raise TypeError, 'No optional args expected'
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_get = {
+ 'as' : 'rtyp',
+ }
+
+ def get(self, _object, _attributes={}, **_arguments):
+ """get: Get the data for an object
+ Required argument: the object whose data is to be returned
+ Keyword argument as: the desired types for the data, in order of preference
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: the data from the object
+ """
+ _code = 'core'
+ _subcode = 'getd'
+
+ aetools.keysubst(_arguments, self._argmap_get)
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_make = {
+ 'new' : 'kocl',
+ 'at' : 'insh',
+ 'to' : 'to ',
+ 'with_data' : 'data',
+ 'with_properties' : 'prdt',
+ }
+
+ def make(self, _no_object=None, _attributes={}, **_arguments):
+ """make: Make a new element
+ Keyword argument new: the class of the new element
+ Keyword argument at: the location at which to insert the element
+ Keyword argument to: when creating an alias file, the original item to create an alias to
+ Keyword argument with_data: the initial data for the element
+ Keyword argument with_properties: the initial values for the properties of the element
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: to the new object(s)
+ """
+ _code = 'core'
+ _subcode = 'crel'
+
+ aetools.keysubst(_arguments, self._argmap_make)
+ if _no_object != None: raise TypeError, 'No direct arg expected'
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_move = {
+ 'to' : 'insh',
+ 'replacing' : 'alrp',
+ 'positioned_at' : 'mvpl',
+ 'routing_suppressed' : 'rout',
+ }
+
+ def move(self, _object, _attributes={}, **_arguments):
+ """move: Move object(s) to a new location
+ Required argument: the object(s) to move
+ Keyword argument to: the new location for the object(s)
+ Keyword argument replacing: Specifies whether or not to replace items in the destination that have the same name as items being moved
+ Keyword argument positioned_at: Gives a list (in local window coordinates) of positions for the destination items
+ Keyword argument routing_suppressed: Specifies whether or not to autoroute items (default is false). Only applies when moving to the system folder.
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ Returns: to the object(s) after they have been moved
+ """
+ _code = 'core'
+ _subcode = 'move'
+
+ aetools.keysubst(_arguments, self._argmap_move)
+ _arguments['----'] = _object
+
+ aetools.enumsubst(_arguments, 'alrp', _Enum_bool)
+ aetools.enumsubst(_arguments, 'mvpl', _Enum_list)
+ aetools.enumsubst(_arguments, 'rout', _Enum_bool)
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_open = {
+ 'using' : 'usin',
+ 'with_properties' : 'prdt',
+ }
+
+ def open(self, _object, _attributes={}, **_arguments):
+ """open: Open the specified object(s)
+ Required argument: list of objects to open
+ Keyword argument using: the application file to open the object with
+ Keyword argument with_properties: the initial values for the properties, to be sent along with the open event sent to the application that opens the direct object
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ """
+ _code = 'aevt'
+ _subcode = 'odoc'
+
+ aetools.keysubst(_arguments, self._argmap_open)
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ def _print(self, _object, _attributes={}, **_arguments):
+ """print: Print the specified object(s)
+ Required argument: list of objects to print
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ """
+ _code = 'aevt'
+ _subcode = 'pdoc'
+
+ if _arguments: raise TypeError, 'No optional args expected'
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_quit = {
+ 'saving' : 'savo',
+ }
+
+ def quit(self, _no_object=None, _attributes={}, **_arguments):
+ """quit: Quit the Finder (direct parameter ignored)
+ Keyword argument saving: specifies whether to save currently open documents (not supported by Finder)
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ """
+ _code = 'aevt'
+ _subcode = 'quit'
+
+ aetools.keysubst(_arguments, self._argmap_quit)
+ if _no_object != None: raise TypeError, 'No direct arg expected'
+
+ aetools.enumsubst(_arguments, 'savo', _Enum_savo)
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
+ _argmap_save = {
+ '_in' : 'kfil',
+ 'as' : 'fltp',
+ }
+
+ def save(self, _object, _attributes={}, **_arguments):
+ """save: Save an object (Not supported by Finder)
+ Required argument: the object to save
+ Keyword argument _in: the file in which to save the object (not supported by Finder)
+ Keyword argument as: the file type of the document in which to save the data (not supported by Finder)
+ Keyword argument _attributes: AppleEvent attribute dictionary
+ """
+ _code = 'core'
+ _subcode = 'save'
+
+ aetools.keysubst(_arguments, self._argmap_save)
+ _arguments['----'] = _object
+
+
+ _reply, _arguments, _attributes = self.send(_code, _subcode,
+ _arguments, _attributes)
+ if _arguments.has_key('errn'):
+ raise aetools.Error, aetools.decodeerror(_arguments)
+ # XXXX Optionally decode result
+ if _arguments.has_key('----'):
+ return _arguments['----']
+
    # Maps Python keyword-argument names to 4-char AppleEvent parameter
    # codes for the 'set' event.
    _argmap_set = {
        'to' : 'data',
    }

    def set(self, _object, _attributes={}, **_arguments):
        """set: Set an object's data
        Required argument: the object to change
        Keyword argument to: the new value
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        # Event class / event ID of the AppleEvent to send.
        _code = 'core'
        _subcode = 'setd'

        aetools.keysubst(_arguments, self._argmap_set)
        _arguments['----'] = _object

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        # An 'errn' key in the reply signals an application-level error.
        if _arguments.has_key('errn'):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']
+
    # Maps Python keyword-argument names to 4-char AppleEvent parameter
    # codes for the 'suite info' event.
    _argmap_suite_info = {
        '_in' : 'wrcd',
    }

    def suite_info(self, _object, _attributes={}, **_arguments):
        """suite info: Get information about event suite(s)
        Required argument: the suite for which to return information
        Keyword argument _in: the human language and script system in which to return information
        Keyword argument _attributes: AppleEvent attribute dictionary
        Returns: a record containing the suites and their versions
        """
        # Event class / event ID of the AppleEvent to send.
        _code = 'core'
        _subcode = 'gtsi'

        aetools.keysubst(_arguments, self._argmap_suite_info)
        _arguments['----'] = _object

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        # An 'errn' key in the reply signals an application-level error.
        if _arguments.has_key('errn'):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']
+
+
# The Finder application object class and its scriptable properties.
# Each aetools.NProperty subclass records the 4-char AppleEvent property
# code ('which') and the 4-char type code of its value ('want').
class application(aetools.ComponentItem):
    """application - An application program"""
    want = 'capp'
class about_this_computer(aetools.NProperty):
    """about this computer - the "About this Computer" dialog and the list of running processes displayed in it"""
    which = 'abbx'
    want = 'obj '
class apple_menu_items_folder(aetools.NProperty):
    """apple menu items folder - the special folder named "Apple Menu Items," the contents of which appear in the Apple menu"""
    which = 'amnu'
    want = 'obj '
class clipboard(aetools.NProperty):
    """clipboard - the Finder's clipboard window"""
    which = 'pcli'
    want = 'obj '
class control_panels_folder(aetools.NProperty):
    """control panels folder - the special folder named 'Control Panels'"""
    which = 'ctrl'
    want = 'obj '
class desktop(aetools.NProperty):
    """desktop - the desktop"""
    which = 'desk'
    want = 'obj '
class extensions_folder(aetools.NProperty):
    """extensions folder - the special folder named 'Extensions'"""
    which = 'extn'
    want = 'obj '
class file_sharing(aetools.NProperty):
    """file sharing - Is file sharing on?"""
    which = 'fshr'
    want = 'bool'
class Finder_preferences(aetools.NProperty):
    """Finder preferences - Various preferences that apply to the Finder as a whole"""
    which = 'pfrp'
    want = 'obj '
class fonts_folder(aetools.NProperty):
    """fonts folder - the special folder named 'Fonts'"""
    which = 'ffnt'
    want = 'obj '
class frontmost(aetools.NProperty):
    """frontmost - Is the Finder the frontmost process?"""
    which = 'pisf'
    want = 'bool'
class insertion_location(aetools.NProperty):
    """insertion location - the container in which a new folder would appear if "New Folder" was selected"""
    which = 'pins'
    want = 'obj '
class largest_free_block(aetools.NProperty):
    """largest free block - the largest free block of process memory available to launch an application"""
    which = 'mfre'
    want = 'long'
class preferences_folder(aetools.NProperty):
    """preferences folder - the special folder named 'Preferences'"""
    which = 'pref'
    want = 'obj '
class product_version(aetools.NProperty):
    """product version - the version of the System software running on this computer"""
    which = 'ver2'
    want = 'itxt'
class selection(aetools.NProperty):
    """selection - the selection visible to the user"""
    which = 'sele'
    want = 'obj '
class sharing_starting_up(aetools.NProperty):
    """sharing starting up - Is file sharing in the process of starting up?"""
    which = 'fsup'
    want = 'bool'
class shutdown_items_folder(aetools.NProperty):
    """shutdown items folder - the special folder named 'Shutdown Items'"""
    which = 'shdf'
    want = 'obj '
class startup_items_folder(aetools.NProperty):
    """startup items folder - the special folder named 'Startup Items'"""
    which = 'strt'
    want = 'obj '
class system_folder(aetools.NProperty):
    """system folder - the System folder"""
    which = 'macs'
    want = 'obj '
class temporary_items_folder(aetools.NProperty):
    """temporary items folder - the special folder named "Temporary Items" (invisible)"""
    which = 'temp'
    want = 'obj '
class version(aetools.NProperty):
    """version - the version of the Finder"""
    which = 'vers'
    want = 'itxt'
class view_preferences(aetools.NProperty):
    """view preferences - backwards compatibility with Finder Scripting Extension. DEPRECATED -- not supported after Finder 8.0"""
    which = 'pvwp'
    want = 'obj '
class visible(aetools.NProperty):
    """visible - Is the Finder's layer visible?"""
    which = 'pvis'
    want = 'bool'
+# element 'dsut' as ['indx', 'name']
+# element 'alia' as ['indx', 'name']
+# element 'appf' as ['indx', 'name', 'ID ']
+# element 'clpf' as ['indx', 'name']
+# element 'lwnd' as ['indx', 'name']
+# element 'ctnr' as ['indx', 'name']
+# element 'cwnd' as ['indx', 'name']
+# element 'dwnd' as ['indx', 'name']
+# element 'ccdv' as ['indx', 'name']
+# element 'dafi' as ['indx', 'name']
+# element 'cdsk' as ['indx', 'name']
+# element 'cdis' as ['indx', 'name', 'ID ']
+# element 'docf' as ['indx', 'name']
+# element 'file' as ['indx', 'name']
+# element 'cfol' as ['indx', 'name', 'ID ']
+# element 'fntf' as ['indx', 'name']
+# element 'fsut' as ['indx', 'name']
+# element 'iwnd' as ['indx', 'name']
+# element 'cobj' as ['indx', 'name']
+# element 'sctr' as ['indx', 'name']
+# element 'swnd' as ['indx', 'name']
+# element 'sndf' as ['indx', 'name']
+# element 'qwnd' as ['indx', 'name']
+# element 'stcs' as ['indx', 'name']
+# element 'ctrs' as ['indx', 'name']
+# element 'cwin' as ['indx', 'name']
+
# The 'file' object class and its properties. NOTE: the class name
# deliberately shadows the Python 2 builtin 'file' -- it mirrors the
# AppleScript terminology and is part of the generated public API.
class file(aetools.ComponentItem):
    """file - A file"""
    want = 'file'
class creator_type(aetools.NProperty):
    """creator type - the OSType identifying the application that created the item"""
    which = 'fcrt'
    want = 'type'
class file_type_obsolete(aetools.NProperty):
    """file type obsolete - the OSType identifying the type of data contained in the item (DEPRECATED - for use with scripts compiled before Finder 8.0. Will be removed in the next release)"""
    which = 'fitp'
    want = 'type'
class file_type(aetools.NProperty):
    """file type - the OSType identifying the type of data contained in the item"""
    which = 'asty'
    want = 'type'
class locked_obsolete(aetools.NProperty):
    """locked obsolete - Is the file locked? (DEPRECATED - for use with scripts compiled before Finder 8.0. Will be removed in the next release)"""
    which = 'islk'
    want = 'bool'
class locked(aetools.NProperty):
    """locked - Is the file locked?"""
    which = 'aslk'
    want = 'bool'
# repeated property product_version the version of the product (visible at the top of the "Get Info" window)
class stationery(aetools.NProperty):
    """stationery - Is the file a stationery pad?"""
    which = 'pspd'
    want = 'bool'
# repeated property version the version of the file (visible at the bottom of the "Get Info" window)

# Plural alias used by the AppleScript terminology.
files = file
+
# The 'window' object class and its properties.
class window(aetools.ComponentItem):
    """window - A window"""
    want = 'cwin'
class collapsed(aetools.NProperty):
    """collapsed - Is the window collapsed (only applies to non-pop-up windows)?"""
    which = 'wshd'
    want = 'bool'
class popup(aetools.NProperty):
    """popup - Is the window is a pop-up window?"""
    which = 'drwr'
    want = 'bool'
class pulled_open(aetools.NProperty):
    """pulled open - Is the window pulled open (only applies to pop-up windows)?"""
    which = 'pull'
    want = 'bool'
# repeated property visible Is the window visible (always true for Finder windows)?
class zoomed_full_size(aetools.NProperty):
    """zoomed full size - Is the window zoomed to the full size of the screen? (can only be set, not read)"""
    which = 'zumf'
    want = 'bool'

# Plural alias used by the AppleScript terminology.
windows = window
+# XXXX application element 'dsut' not found!!
+# XXXX application element 'alia' not found!!
+# XXXX application element 'appf' not found!!
+# XXXX application element 'clpf' not found!!
+# XXXX application element 'lwnd' not found!!
+# XXXX application element 'ctnr' not found!!
+# XXXX application element 'cwnd' not found!!
+# XXXX application element 'dwnd' not found!!
+# XXXX application element 'ccdv' not found!!
+# XXXX application element 'dafi' not found!!
+# XXXX application element 'cdsk' not found!!
+# XXXX application element 'cdis' not found!!
+# XXXX application element 'docf' not found!!
+# XXXX application element 'cfol' not found!!
+# XXXX application element 'fntf' not found!!
+# XXXX application element 'fsut' not found!!
+# XXXX application element 'iwnd' not found!!
+# XXXX application element 'cobj' not found!!
+# XXXX application element 'sctr' not found!!
+# XXXX application element 'swnd' not found!!
+# XXXX application element 'sndf' not found!!
+# XXXX application element 'qwnd' not found!!
+# XXXX application element 'stcs' not found!!
+# XXXX application element 'ctrs' not found!!
# Property and element lookup tables consulted by aetools when resolving
# attribute access on each object class.
application._propdict = {
    'about_this_computer' : about_this_computer,
    'apple_menu_items_folder' : apple_menu_items_folder,
    'clipboard' : clipboard,
    'control_panels_folder' : control_panels_folder,
    'desktop' : desktop,
    'extensions_folder' : extensions_folder,
    'file_sharing' : file_sharing,
    'Finder_preferences' : Finder_preferences,
    'fonts_folder' : fonts_folder,
    'frontmost' : frontmost,
    'insertion_location' : insertion_location,
    'largest_free_block' : largest_free_block,
    'preferences_folder' : preferences_folder,
    'product_version' : product_version,
    'selection' : selection,
    'sharing_starting_up' : sharing_starting_up,
    'shutdown_items_folder' : shutdown_items_folder,
    'startup_items_folder' : startup_items_folder,
    'system_folder' : system_folder,
    'temporary_items_folder' : temporary_items_folder,
    'version' : version,
    'view_preferences' : view_preferences,
    'visible' : visible,
}
application._elemdict = {
    'file' : file,
    'window' : window,
}
file._propdict = {
    'creator_type' : creator_type,
    'file_type_obsolete' : file_type_obsolete,
    'file_type' : file_type,
    'locked_obsolete' : locked_obsolete,
    'locked' : locked,
    'product_version' : product_version,
    'stationery' : stationery,
    'version' : version,
}
file._elemdict = {
}
window._propdict = {
    'collapsed' : collapsed,
    'popup' : popup,
    'pulled_open' : pulled_open,
    'visible' : visible,
    'zoomed_full_size' : zoomed_full_size,
}
window._elemdict = {
}
+# XXXX enum list not found!!
+# XXXX enum bool not found!!
+# XXXX enum savo not found!!
+
+#
+# Indices of types declared in this module
+#
# Reverse indices: 4-char AppleEvent codes back to the classes declared
# in this module.
_classdeclarations = {
    'cwin' : window,
    'file' : file,
    'capp' : application,
}

_propdeclarations = {
    'amnu' : apple_menu_items_folder,
    'pvwp' : view_preferences,
    'extn' : extensions_folder,
    'pins' : insertion_location,
    'fshr' : file_sharing,
    'aslk' : locked,
    'drwr' : popup,
    'fcrt' : creator_type,
    'pcli' : clipboard,
    'asty' : file_type,
    'strt' : startup_items_folder,
    'islk' : locked_obsolete,
    'pvis' : visible,
    'pref' : preferences_folder,
    'pisf' : frontmost,
    'sele' : selection,
    'temp' : temporary_items_folder,
    'pull' : pulled_open,
    'abbx' : about_this_computer,
    'wshd' : collapsed,
    'pspd' : stationery,
    'fitp' : file_type_obsolete,
    'pfrp' : Finder_preferences,
    'desk' : desktop,
    'fsup' : sharing_starting_up,
    'mfre' : largest_free_block,
    'ctrl' : control_panels_folder,
    'zumf' : zoomed_full_size,
    'shdf' : shutdown_items_folder,
    'ffnt' : fonts_folder,
    'macs' : system_folder,
    'ver2' : product_version,
    'vers' : version,
}

_compdeclarations = {
}

_enumdeclarations = {
}
--- /dev/null
+"""Suite Misc Suite: Suite that adds additional features to the Application.
+Level 1, version 1
+
+Generated from MPW:MPW Shell
+AETE/AEUT resource version 1/0, language 0, script 0
+"""
+
+import aetools
+import MacOS
+
+_code = 'misc'
+
class MPW_Misc_Suite:
    """Generated AppleEvent glue for the MPW Shell 'misc' suite."""

    def DoScript(self, _object, _attributes={}, **_arguments):
        """DoScript: Execute an MPW command, any command that could be executed from the command line can be sent as a script.
        Required argument: The script to execute
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        # Event class / event ID of the AppleEvent to send.
        _code = 'misc'
        _subcode = 'dosc'

        # DoScript takes no optional keyword parameters.
        if _arguments: raise TypeError, 'No optional args expected'
        _arguments['----'] = _object

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        # An 'errn' key in the reply signals an application-level error.
        if _arguments.has_key('errn'):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']
+
+
+#
+# Indices of types declared in this module
+#
# This suite declares no classes, properties, comparisons or
# enumerations; the empty tables are kept for aetools' benefit.
_classdeclarations = {
}

_propdeclarations = {
}

_compdeclarations = {
}

_enumdeclarations = {
}
--- /dev/null
+#
+# Pyrex -- Mac system interface
+#
+
+import os, sys, string
+import aetools
+from aetools import TalkTo
+from StdSuites.Standard_Suite import Standard_Suite_Events as Standard_Suite
+from Pyrex.Utils import replace_suffix
+from Pyrex.Compiler.Errors import PyrexError
+
# MPW tool names and settings used to drive compilation via ToolServer.
c_compiler = "MWCPPC"
c_optimizations = "off"
#c_linker = "PPCLink"
c_linker = "MWLinkPPC"
shared_lib_suffix = ".slb"  # classic-Mac shared library suffix

#py_home = "Python2.2:Home:"
py_home = sys.exec_prefix  # root of the Python installation

# Directories searched for Python headers (classic Mac ':' paths).
py_include_dirs = (
    py_home + "Include:",
    py_home + "Mac:Include:"
)

# Import library for the interpreter, linked into every extension.
pythoncore = py_home + "PythonCore"

mwlibdir = "MPW:Interfaces&Libraries:Libraries:MWPPCLibraries:"

# CodeWarrior runtime and system interface libraries to link against.
libraries = (
    #mwlibdir + "'MSL C.PPC.Lib'",
    #mwlibdir + "'MSL RuntimePPC.Lib'",
    mwlibdir + "'MSL ShLibRuntime.Lib'",
    mwlibdir + "InterfaceLib",
    #mwlibdir + "MathLib",
    )
+
class CCompilerError(PyrexError):
    """Raised when the C compiler or linker reports a failure."""
    pass
+
+#---------------- ToolServer ---------------------------
+
+from TS_Misc_Suite import TS_Misc_Suite
+
class ToolServer(Standard_Suite, TS_Misc_Suite, TalkTo):
    """AppleEvent proxy object for the MPW ToolServer application."""
    pass
+
def send_toolserver_command(cmd):
    # Hand the given MPW command line to ToolServer (creator code
    # 'MPSX'), launching it first if it is not already running.
    toolserver = ToolServer('MPSX', start = 1)
    return toolserver.DoScript(cmd)
+
def do_toolserver_command(command):
    # Run an MPW command via ToolServer, echo its output, and return
    # the command's status code. Raises CCompilerError for AppleEvent
    # failures or ToolServer-level errors.
    try:
        result = send_toolserver_command(command)
    except aetools.Error, e:
        raise CCompilerError("Apple Event error: %s" % e)
    errn, stat, stdout, stderr = result
    if errn:
        raise CCompilerError("ToolServer error: %s" % errn)
    # ToolServer output uses Mac line endings; normalise for display.
    stdout = string.replace(stdout, "\r", "\n")
    stderr = string.replace(stderr, "\r", "\n")
    if stdout:
        #print "<<< Begin ToolServer StdOut >>>"
        # NOTE(review): the tool's stdout is echoed to sys.stderr --
        # presumably to keep build chatter off this process's stdout;
        # confirm before changing.
        sys.stderr.write(stdout)
        #print "<<< End ToolServer StdOut >>>"
    if stderr:
        #print "<<< Begin ToolServer StdErr >>>"
        sys.stderr.write(stderr)
        #print "<<< End ToolServer StdErr >>>"
    return stat
+
+#-------------------------------------------------------
+
def c_compile(c_file):
    # Compile the given C source file to produce an object file.
    # Returns the pathname of the resulting file. Raises
    # CCompilerError if the compiler exits with nonzero status.
    c_file = os.path.join(os.getcwd(), c_file)
    c_file_dir = os.path.dirname(c_file)
    o_file = replace_suffix(c_file, ".o")
    # Search the source's own directory first, then the Python headers.
    # (Loop variable renamed: 'dir' shadowed the builtin.)
    include_options = ["-i %s" % c_file_dir]
    for include_dir in py_include_dirs:
        include_options.append("-i %s" % include_dir)
    command = "%s -opt %s -nomapcr -w off -r %s %s -o %s" % (
        c_compiler,
        c_optimizations,
        string.join(include_options),
        c_file,
        o_file,
        )
    stat = do_toolserver_command(command)
    if stat:
        raise CCompilerError("C compiler returned status %s" % stat)
    return o_file
+
def c_link(obj_file):
    # Convenience wrapper: link a single object file into a shared
    # library. See c_link_list for details.
    return c_link_list([obj_file])
+
def c_link_list(obj_files):
    # Link the given object files into a dynamically loadable
    # extension file and return the pathname of the result.
    # The output name is derived from the first object file.
    out_file = replace_suffix(obj_files[0], shared_lib_suffix)
    command_parts = [
        c_linker,
        "-xm s -export all",
        string.join(obj_files),
        pythoncore,
        string.join(libraries),
        "-o",
        out_file,
    ]
    stat = do_toolserver_command(string.join(command_parts))
    if stat:
        raise CCompilerError("Linker returned status %s" % stat)
    return out_file
+
def test_c_compile(link = 0):
    # Command-line test harness: compile every .c file named in
    # sys.argv, collect the object files, and optionally link them.
    objs = []
    for arg in sys.argv[1:]:
        if arg.endswith(".c"):
            try:
                obj = c_compile(arg)
            except PyrexError, e:
                #print "Caught a PyrexError:" ###
                #print repr(e) ###
                print "%s.%s:" % (e.__class__.__module__,
                        e.__class__.__name__), e
                sys.exit(1)
        else:
            # Non-.c arguments are assumed to already be object files.
            obj = arg
        objs.append(obj)
    if link:
        c_link_list(objs)
+
--- /dev/null
+#
+# Pyrex -- Misc Mac-specific things
+#
+
+import os, MacOS, macfs
+
def open_new_file(path):
    # On the Mac, try to preserve Finder position
    # of previously existing file.
    fsspec = macfs.FSSpec(path)
    try:
        old_finfo = fsspec.GetFInfo()
    except MacOS.Error, e:
        #print "MacUtils.open_new_file:", e ###
        old_finfo = None  # no pre-existing file, or its info is unreadable
    # Remove any existing file so open() creates a fresh one.
    try:
        os.unlink(path)
    except OSError:
        pass
    file = open(path, "w")
    new_finfo = fsspec.GetFInfo()
    if old_finfo:
        #print "MacUtils.open_new_file:", path ###
        #print "...old file info =", old_finfo.Creator, old_finfo.Type, old_finfo.Location ###
        #print "...new file info =", new_finfo.Creator, new_finfo.Type, new_finfo.Location ###
        # Carry the Finder icon position and flags over from the
        # replaced file.
        new_finfo.Location = old_finfo.Location
        new_finfo.Flags = old_finfo.Flags
        # Make darn sure the type and creator are right. There seems
        # to be a bug in MacPython 2.2 that screws them up sometimes.
        new_finfo.Creator = "R*ch"
        new_finfo.Type = "TEXT"
        fsspec.SetFInfo(new_finfo)
    return file
+
--- /dev/null
+"Apple Event suite for pyserver."
+
+import aetools
+import MacOS
+
+_code = 'misc'
+
class PS_Misc_Suite:
    """AppleEvent glue for the pyserver 'misc' suite."""

    def DoScript(self, _object, _attributes={}, **_arguments):
        """DoScript: Execute a Python file, optionally with command line args.
        Required argument: filename.py or [filename.py, arg, ...]
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        # Event class / event ID of the AppleEvent to send.
        _code = 'misc'
        _subcode = 'dosc'

        # DoScript takes no optional keyword parameters.
        if _arguments: raise TypeError, 'No optional args expected'
        _arguments['----'] = _object

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        # An 'errn' key in the reply signals an application-level error.
        if _arguments.has_key('errn'):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']
+
+
+#
+# Indices of types declared in this module
+#
# This suite declares no classes, properties, comparisons or
# enumerations; the empty tables are kept for aetools' benefit.
_classdeclarations = {
}

_propdeclarations = {
}

_compdeclarations = {
}

_enumdeclarations = {
}
--- /dev/null
+#
+# Simple Apple-event driven Python interpreter
+#
+
+import os, sys, traceback
+from cStringIO import StringIO
+from MiniAEFrame import AEServer, MiniApplication
+
class PythonServer(AEServer, MiniApplication):
    """Minimal Apple-event server: handles open-application, quit and
    the 'misc'/'dosc' (DoScript) events."""

    def __init__(self):
        MiniApplication.__init__(self)
        AEServer.__init__(self)
        # Handlers are module-level functions defined below.
        self.installaehandler('aevt', 'oapp', ignore)
        self.installaehandler('aevt', 'quit', quit)
        self.installaehandler('misc', 'dosc', doscript)
+
+
def ignore(**kwds):
    """Event handler that deliberately does nothing."""
    return None
+
def quit(**kwds):
    # Handler for the 'quit' Apple event: stop the module-level
    # server's event loop.
    server._quit()
+
+def doscript(args, **kwds):
+ print "doscript:", repr(args) ###
+ stat = 0
+ output = ""
+ errput = ""
+ #print "Normalising args" ###
+ if type(args) == type(""):
+ args = [args]
+ #print "Setting sys.argv" ###
+ sys.argv = args
+ #print "Finding script directory and module file" ###
+ dir = os.path.dirname(args[0])
+ dir = os.path.join(start_dir, dir)
+ pyfile = os.path.basename(args[0])
+ mod = os.path.splitext(pyfile)[0]
+ #print "dir:", repr(dir) ###
+ #print "mod:", repr(mod) ###
+ os.chdir(dir)
+ sys.path = start_path[:]
+ sys.path[0] = dir
+ #print "path:", sys.path ###
+ try:
+ sys.stdout = StringIO()
+ sys.stderr = StringIO()
+ try:
+ #sys.__stdout__.write("Path: %s\n" % sys.path) ###
+ #sys.__stdout__.write("Importing: %s\n" % mod) ###
+ try:
+ __import__(mod)
+ except KeyboardInterrupt:
+ raise
+ except SystemExit, exc:
+ #sys.__stdout__.write("Caught a SystemExit\n") ###
+ try:
+ stat = int(str(exc))
+ except ValueError:
+ stat = 1
+ #sys.__stdout__.write("stat = %s\n" % stat) ###
+ except:
+ traceback.print_exc()
+ stat = 1
+ #sys.__stdout__.write("Done the import\n") ###
+ finally:
+ output = sys.stdout.getvalue()
+ #sys.__stdout__.write("Output:\n%s" % output) ###
+ errput = sys.stderr.getvalue()
+ finally:
+ sys.stdout = sys.__stdout__
+ sys.stderr = sys.__stdout__
+ pass
+ return [stat, output, errput]
+
# Remember the startup environment so every DoScript runs with a
# pristine working directory and module search path.
start_dir = os.getcwd()
start_path = sys.path[:]
server = PythonServer()
#print "Open for business"
try:
    server.mainloop()
except:
    traceback.print_exc()
    #sys.exit(1)
#print "Closing shop"
--- /dev/null
+"""Suite Misc Suite: Suite that adds additional features to the Application.
+Level 1, version 1
+
+Generated from Macintosh HD:Desktop Folder:ToolServer 3.4.1:ToolServer
+AETE/AEUT resource version 1/0, language 0, script 0
+"""
+
+import aetools
+import MacOS
+
+_code = 'misc'
+
class TS_Misc_Suite:
    """ToolServer 'misc' suite glue, hand-modified from generated code
    to return errors instead of raising them."""

    def DoScript(self, _object, _attributes={}, **_arguments):
        """DoScript: Execute an MPW command, any command that could be executed from the command line can be sent as a script.
        Required argument: The script to execute
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        # Event class / event ID of the AppleEvent to send.
        _code = 'misc'
        _subcode = 'dosc'

        if _arguments: raise TypeError, 'No optional args expected'
        _arguments['----'] = _object

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        # Unlike the generated version (kept commented out below), this
        # decodes errors and returns them to the caller together with
        # the command status and captured output streams.
        #if _arguments.has_key('errn'):
        #    raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        #if _arguments.has_key('----'):
        #    return _arguments['----']
        errn = 0
        stat = 0
        stdout = ""
        stderr = ""
        if _arguments.has_key('errn'):
            errn = _arguments['errn']
            if errn:
                errn = aetools.decodeerror(_arguments)
        if _arguments.has_key('stat'):
            stat = _arguments['stat']
        if _arguments.has_key('----'):
            stdout = _arguments['----']
        if _arguments.has_key('diag'):
            stderr = _arguments['diag']
        # (error, status, stdout text, stderr text)
        return (errn, stat, stdout, stderr)
+
+
+#
+# Indices of types declared in this module
+#
# This suite declares no classes, properties, comparisons or
# enumerations; the empty tables are kept for aetools' benefit.
_classdeclarations = {
}

_propdeclarations = {
}

_compdeclarations = {
}

_enumdeclarations = {
}
--- /dev/null
+/*
+ * This is a hacked version of _Filemodule.c from the Python 2.3
+ * distribution to support access to the finderInfo field of the
+ * FSCatalogInfo data structure.
+ */
+
+/* ========================== Module _File ========================== */
+
+#include "Python.h"
+
+
+
+#ifdef _WIN32
+#include "pywintoolbox.h"
+#else
+#include "macglue.h"
+#include "pymactoolbox.h"
+#endif
+
+/* Macro to test whether a weak-loaded CFM function exists */
+#define PyMac_PRECHECK(rtn) do { if ( &rtn == NULL ) {\
+ PyErr_SetString(PyExc_NotImplementedError, \
+ "Not available in this shared library/OS version"); \
+ return NULL; \
+ }} while(0)
+
+
+#ifdef WITHOUT_FRAMEWORKS
+#include <Files.h>
+#else
+#include <Carbon/Carbon.h>
+#endif
+
+#ifdef USE_TOOLBOX_OBJECT_GLUE
+extern int _PyMac_GetFSSpec(PyObject *v, FSSpec *spec);
+extern int _PyMac_GetFSRef(PyObject *v, FSRef *fsr);
+extern PyObject *_PyMac_BuildFSSpec(FSSpec *spec);
+extern PyObject *_PyMac_BuildFSRef(FSRef *spec);
+
+#define PyMac_GetFSSpec _PyMac_GetFSSpec
+#define PyMac_GetFSRef _PyMac_GetFSRef
+#define PyMac_BuildFSSpec _PyMac_BuildFSSpec
+#define PyMac_BuildFSRef _PyMac_BuildFSRef
+#else
+extern int PyMac_GetFSSpec(PyObject *v, FSSpec *spec);
+extern int PyMac_GetFSRef(PyObject *v, FSRef *fsr);
+extern PyObject *PyMac_BuildFSSpec(FSSpec *spec);
+extern PyObject *PyMac_BuildFSRef(FSRef *spec);
+#endif
+
+/* Forward declarations */
+static PyObject *FInfo_New(FInfo *itself);
+static PyObject *FSRef_New(FSRef *itself);
+static PyObject *FSSpec_New(FSSpec *itself);
+static PyObject *Alias_New(AliasHandle itself);
+static int FInfo_Convert(PyObject *v, FInfo *p_itself);
+#define FSRef_Convert PyMac_GetFSRef
+#define FSSpec_Convert PyMac_GetFSSpec
+static int Alias_Convert(PyObject *v, AliasHandle *p_itself);
+
+/*
+** UTCDateTime records
+*/
/* Unpack a (highSeconds, lowSeconds, fraction) tuple into *ptr.
   Returns 1 on success, 0 (with an exception set) on failure. */
static int
UTCDateTime_Convert(PyObject *v, UTCDateTime *ptr)
{
    return PyArg_Parse(v, "(HlH)", &ptr->highSeconds, &ptr->lowSeconds, &ptr->fraction);
}

/* Build a (highSeconds, lowSeconds, fraction) tuple from *ptr. */
static PyObject *
UTCDateTime_New(UTCDateTime *ptr)
{
    return Py_BuildValue("(HlH)", ptr->highSeconds, ptr->lowSeconds, ptr->fraction);
}
+
+/*
+** Optional fsspec and fsref pointers. None will pass NULL
+*/
/* Like PyMac_GetFSSpec, but Python None selects a NULL FSSpec pointer.
   On non-None input, fills the caller-supplied buffer that *spec
   already points at. Returns 1 on success, 0 on conversion failure. */
static int
myPyMac_GetOptFSSpecPtr(PyObject *v, FSSpec **spec)
{
    if (v == Py_None) {
        *spec = NULL;
        return 1;
    }
    return PyMac_GetFSSpec(v, *spec);
}

/* Same convention as above, for FSRef. */
static int
myPyMac_GetOptFSRefPtr(PyObject *v, FSRef **ref)
{
    if (v == Py_None) {
        *ref = NULL;
        return 1;
    }
    return PyMac_GetFSRef(v, *ref);
}
+
/*
** Parse/generate objects
*/
/* Expose an HFS Unicode (UTF-16) filename as a Python unicode object. */
static PyObject *
PyMac_BuildHFSUniStr255(HFSUniStr255 *itself)
{
    return Py_BuildValue("u#", itself->unicode, itself->length);
}
+
+static PyObject *File_Error;
+
+/* ------------------- Object type FSCatalogInfo -------------------- */
+
+static PyTypeObject FSCatalogInfo_Type;
+
+#define FSCatalogInfo_Check(x) ((x)->ob_type == &FSCatalogInfo_Type || PyObject_TypeCheck((x), &FSCatalogInfo_Type))
+
+typedef struct FSCatalogInfoObject {
+ PyObject_HEAD
+ FSCatalogInfo ob_itself;
+} FSCatalogInfoObject;
+
+static PyObject *FSCatalogInfo_New(FSCatalogInfo *itself)
+{
+ FSCatalogInfoObject *it;
+ if (itself == NULL) return Py_None;
+ it = PyObject_NEW(FSCatalogInfoObject, &FSCatalogInfo_Type);
+ if (it == NULL) return NULL;
+ it->ob_itself = *itself;
+ return (PyObject *)it;
+}
/* PyArg_Parse "O&" converter: copy the FSCatalogInfo out of a
   FSCatalogInfo object. Returns 1 on success, 0 with TypeError set
   if v is not an FSCatalogInfo. */
static int FSCatalogInfo_Convert(PyObject *v, FSCatalogInfo *p_itself)
{
    if (!FSCatalogInfo_Check(v))
    {
        PyErr_SetString(PyExc_TypeError, "FSCatalogInfo required");
        return 0;
    }
    *p_itself = ((FSCatalogInfoObject *)v)->ob_itself;
    return 1;
}
+
/* Instance destructor: the embedded FSCatalogInfo owns no resources,
   so only the object memory itself is released. */
static void FSCatalogInfo_dealloc(FSCatalogInfoObject *self)
{
    /* Cleanup of self->ob_itself goes here */
    self->ob_type->tp_free((PyObject *)self);
}

/* No methods; attribute access goes through the getset table. */
static PyMethodDef FSCatalogInfo_methods[] = {
    {NULL, NULL, 0}
};
+
/* Generated getters/setters for the scalar FSCatalogInfo fields.
   The setters use the "PyArg_Parse(...) - 1" idiom: PyArg_Parse
   returns 1 on success and 0 on failure, so subtracting 1 yields the
   0 / -1 result the getset protocol expects. The trailing "return 0;"
   after each such return is unreachable generated boilerplate. */

static PyObject *FSCatalogInfo_get_nodeFlags(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("H", self->ob_itself.nodeFlags);
}

static int FSCatalogInfo_set_nodeFlags(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "H", &self->ob_itself.nodeFlags)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_volume(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("h", self->ob_itself.volume);
}

static int FSCatalogInfo_set_volume(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "h", &self->ob_itself.volume)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_parentDirID(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.parentDirID);
}

static int FSCatalogInfo_set_parentDirID(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.parentDirID)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_nodeID(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.nodeID);
}

static int FSCatalogInfo_set_nodeID(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.nodeID)-1;
    return 0;
}

/* The date fields round-trip through the UTCDateTime tuple helpers. */
static PyObject *FSCatalogInfo_get_createDate(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.createDate);
}

static int FSCatalogInfo_set_createDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.createDate)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_contentModDate(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.contentModDate);
}

static int FSCatalogInfo_set_contentModDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.contentModDate)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_attributeModDate(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.attributeModDate);
}

static int FSCatalogInfo_set_attributeModDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.attributeModDate)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_accessDate(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.accessDate);
}

static int FSCatalogInfo_set_accessDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.accessDate)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_backupDate(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("O&", UTCDateTime_New, &self->ob_itself.backupDate);
}

static int FSCatalogInfo_set_backupDate(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "O&", UTCDateTime_Convert, &self->ob_itself.backupDate)-1;
    return 0;
}

/* permissions is a 4-element array, exposed as a 4-tuple. */
static PyObject *FSCatalogInfo_get_permissions(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("(llll)", self->ob_itself.permissions[0], self->ob_itself.permissions[1], self->ob_itself.permissions[2], self->ob_itself.permissions[3]);
}

static int FSCatalogInfo_set_permissions(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "(llll)", &self->ob_itself.permissions[0], &self->ob_itself.permissions[1], &self->ob_itself.permissions[2], &self->ob_itself.permissions[3])-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_valence(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.valence);
}

static int FSCatalogInfo_set_valence(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.valence)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_dataLogicalSize(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.dataLogicalSize);
}

static int FSCatalogInfo_set_dataLogicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.dataLogicalSize)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_dataPhysicalSize(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.dataPhysicalSize);
}

static int FSCatalogInfo_set_dataPhysicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.dataPhysicalSize)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_rsrcLogicalSize(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.rsrcLogicalSize);
}

static int FSCatalogInfo_set_rsrcLogicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.rsrcLogicalSize)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_rsrcPhysicalSize(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.rsrcPhysicalSize);
}

static int FSCatalogInfo_set_rsrcPhysicalSize(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.rsrcPhysicalSize)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_sharingFlags(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("l", self->ob_itself.sharingFlags);
}

static int FSCatalogInfo_set_sharingFlags(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "l", &self->ob_itself.sharingFlags)-1;
    return 0;
}

static PyObject *FSCatalogInfo_get_userPrivileges(FSCatalogInfoObject *self, void *closure)
{
    return Py_BuildValue("b", self->ob_itself.userPrivileges);
}

static int FSCatalogInfo_set_userPrivileges(FSCatalogInfoObject *self, PyObject *v, void *closure)
{
    return PyArg_Parse(v, "b", &self->ob_itself.userPrivileges)-1;
    return 0;
}
+
+/* Return the Finder-info bytes of the wrapped catalog record as an FInfo
+ * object.  The original read self->finderInfo, but the wrapped record lives
+ * in ob_itself (every sibling accessor and the tp_new memset go through it),
+ * so the field must be reached as ob_itself.finderInfo.
+ * NOTE(review): assumes FSCatalogInfoObject has no separate finderInfo
+ * member -- confirm against the struct declaration earlier in the file. */
+static PyObject *FSCatalogInfo_get_finderInfo(FSCatalogInfoObject *self, void *closure)
+{
+ return FInfo_New((FInfo *)self->ob_itself.finderInfo);
+}
+
+/* Store an FInfo object's record into the catalog info's finderInfo bytes.
+ * Fixes: the exception object is PyExc_TypeError ("PyTypeError" is not a
+ * defined name -- cf. the other converters in this file); the copy source is
+ * the assigned value "v", not "self" miscast to FInfoObject; and the
+ * destination is reached through ob_itself like every other attribute. */
+static int FSCatalogInfo_set_finderInfo(FSCatalogInfoObject *self, PyObject *v, void *closure)
+{
+ if (!FInfo_Check(v)) {
+  PyErr_SetString(PyExc_TypeError, "Expected an FInfo object");
+  return -1;
+ }
+ *(FInfo *)self->ob_itself.finderInfo = ((FInfoObject *)v)->ob_itself;
+ return 0;
+}
+
+/* Attribute table: one getter/setter pair per field of the wrapped
+ * FSCatalogInfo record (no per-attribute docstrings were generated). */
+static PyGetSetDef FSCatalogInfo_getsetlist[] = {
+ {"nodeFlags", (getter)FSCatalogInfo_get_nodeFlags, (setter)FSCatalogInfo_set_nodeFlags, NULL},
+ {"volume", (getter)FSCatalogInfo_get_volume, (setter)FSCatalogInfo_set_volume, NULL},
+ {"parentDirID", (getter)FSCatalogInfo_get_parentDirID, (setter)FSCatalogInfo_set_parentDirID, NULL},
+ {"nodeID", (getter)FSCatalogInfo_get_nodeID, (setter)FSCatalogInfo_set_nodeID, NULL},
+ {"createDate", (getter)FSCatalogInfo_get_createDate, (setter)FSCatalogInfo_set_createDate, NULL},
+ {"contentModDate", (getter)FSCatalogInfo_get_contentModDate, (setter)FSCatalogInfo_set_contentModDate, NULL},
+ {"attributeModDate", (getter)FSCatalogInfo_get_attributeModDate, (setter)FSCatalogInfo_set_attributeModDate, NULL},
+ {"accessDate", (getter)FSCatalogInfo_get_accessDate, (setter)FSCatalogInfo_set_accessDate, NULL},
+ {"backupDate", (getter)FSCatalogInfo_get_backupDate, (setter)FSCatalogInfo_set_backupDate, NULL},
+ {"permissions", (getter)FSCatalogInfo_get_permissions, (setter)FSCatalogInfo_set_permissions, NULL},
+ {"valence", (getter)FSCatalogInfo_get_valence, (setter)FSCatalogInfo_set_valence, NULL},
+ {"dataLogicalSize", (getter)FSCatalogInfo_get_dataLogicalSize, (setter)FSCatalogInfo_set_dataLogicalSize, NULL},
+ {"dataPhysicalSize", (getter)FSCatalogInfo_get_dataPhysicalSize, (setter)FSCatalogInfo_set_dataPhysicalSize, NULL},
+ {"rsrcLogicalSize", (getter)FSCatalogInfo_get_rsrcLogicalSize, (setter)FSCatalogInfo_set_rsrcLogicalSize, NULL},
+ {"rsrcPhysicalSize", (getter)FSCatalogInfo_get_rsrcPhysicalSize, (setter)FSCatalogInfo_set_rsrcPhysicalSize, NULL},
+ {"sharingFlags", (getter)FSCatalogInfo_get_sharingFlags, (setter)FSCatalogInfo_set_sharingFlags, NULL},
+ {"userPrivileges", (getter)FSCatalogInfo_get_userPrivileges, (setter)FSCatalogInfo_set_userPrivileges, NULL},
+ {"finderInfo", (getter)FSCatalogInfo_get_finderInfo, (setter)FSCatalogInfo_set_finderInfo, NULL},
+ {NULL, NULL, NULL, NULL},
+};
+
+
+/* No custom compare/repr/hash: the corresponding type slots stay NULL. */
+#define FSCatalogInfo_compare NULL
+
+#define FSCatalogInfo_repr NULL
+
+#define FSCatalogInfo_hash NULL
+/* __init__: every plain field of the wrapped FSCatalogInfo may be supplied
+ * as a keyword argument.  The keyword "atributeModDate" was a misspelling,
+ * inconsistent with the "attributeModDate" attribute exposed in the getset
+ * table; it is corrected here.  (NOTE(review): the "permissions" attribute
+ * has no corresponding keyword -- presumably intentional; confirm.) */
+static int FSCatalogInfo_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ static char *kw[] = {
+ "nodeFlags",
+ "volume",
+ "parentDirID",
+ "nodeID",
+ "createDate",
+ "contentModDate",
+ "attributeModDate",
+ "accessDate",
+ "backupDate",
+ "valence",
+ "dataLogicalSize",
+ "dataPhysicalSize",
+ "rsrcLogicalSize",
+ "rsrcPhysicalSize",
+ "sharingFlags",
+ "userPrivileges"
+ , 0};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|HhllO&O&O&O&O&llllllb", kw, &((FSCatalogInfoObject *)self)->ob_itself.nodeFlags,
+ &((FSCatalogInfoObject *)self)->ob_itself.volume,
+ &((FSCatalogInfoObject *)self)->ob_itself.parentDirID,
+ &((FSCatalogInfoObject *)self)->ob_itself.nodeID,
+ UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.createDate,
+ UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.contentModDate,
+ UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.attributeModDate,
+ UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.accessDate,
+ UTCDateTime_Convert, &((FSCatalogInfoObject *)self)->ob_itself.backupDate,
+ &((FSCatalogInfoObject *)self)->ob_itself.valence,
+ &((FSCatalogInfoObject *)self)->ob_itself.dataLogicalSize,
+ &((FSCatalogInfoObject *)self)->ob_itself.dataPhysicalSize,
+ &((FSCatalogInfoObject *)self)->ob_itself.rsrcLogicalSize,
+ &((FSCatalogInfoObject *)self)->ob_itself.rsrcPhysicalSize,
+ &((FSCatalogInfoObject *)self)->ob_itself.sharingFlags,
+ &((FSCatalogInfoObject *)self)->ob_itself.userPrivileges))
+ {
+ return -1;
+ }
+ return 0;
+}
+
+/* Standard allocation plumbing for FSCatalogInfo instances. */
+#define FSCatalogInfo_tp_alloc PyType_GenericAlloc
+
+/* tp_new: allocate the object and zero-fill the embedded record so an
+ * uninitialized instance is at least deterministic. */
+static PyObject *FSCatalogInfo_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyObject *self;
+
+ if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
+ memset(&((FSCatalogInfoObject *)self)->ob_itself, 0, sizeof(FSCatalogInfo));
+ return self;
+}
+
+#define FSCatalogInfo_tp_free PyObject_Del
+
+
+/* Type object for Carbon.File.FSCatalogInfo; behavior comes entirely from
+ * the getset table, tp_init and tp_new above.  Subclassing is allowed. */
+static PyTypeObject FSCatalogInfo_Type = {
+ PyObject_HEAD_INIT(NULL)
+ 0, /*ob_size*/
+ "Carbon.File.FSCatalogInfo", /*tp_name*/
+ sizeof(FSCatalogInfoObject), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ /* methods */
+ (destructor) FSCatalogInfo_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ (getattrfunc)0, /*tp_getattr*/
+ (setattrfunc)0, /*tp_setattr*/
+ (cmpfunc) FSCatalogInfo_compare, /*tp_compare*/
+ (reprfunc) FSCatalogInfo_repr, /*tp_repr*/
+ (PyNumberMethods *)0, /* tp_as_number */
+ (PySequenceMethods *)0, /* tp_as_sequence */
+ (PyMappingMethods *)0, /* tp_as_mapping */
+ (hashfunc) FSCatalogInfo_hash, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ PyObject_GenericGetAttr, /*tp_getattro*/
+ PyObject_GenericSetAttr, /*tp_setattro */
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
+ 0, /*tp_doc*/
+ 0, /*tp_traverse*/
+ 0, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ FSCatalogInfo_methods, /* tp_methods */
+ 0, /*tp_members*/
+ FSCatalogInfo_getsetlist, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ FSCatalogInfo_tp_init, /* tp_init */
+ FSCatalogInfo_tp_alloc, /* tp_alloc */
+ FSCatalogInfo_tp_new, /* tp_new */
+ FSCatalogInfo_tp_free, /* tp_free */
+};
+
+/* ----------------- End object type FSCatalogInfo ------------------ */
+
+
+/* ----------------------- Object type FInfo ------------------------ */
+
+/* Python wrapper around a Carbon FInfo record, stored by value. */
+static PyTypeObject FInfo_Type;
+
+/* True for FInfo instances and subclass instances (first test is a fast path). */
+#define FInfo_Check(x) ((x)->ob_type == &FInfo_Type || PyObject_TypeCheck((x), &FInfo_Type))
+
+typedef struct FInfoObject {
+ PyObject_HEAD
+ FInfo ob_itself;
+} FInfoObject;
+
+/* Box a copy of *itself in a new FInfoObject.
+ * A NULL record is reported as a Mac resNotFound error. */
+static PyObject *FInfo_New(FInfo *itself)
+{
+ FInfoObject *obj;
+
+ if (itself == NULL)
+  return PyMac_Error(resNotFound);
+ obj = PyObject_NEW(FInfoObject, &FInfo_Type);
+ if (obj == NULL)
+  return NULL;
+ obj->ob_itself = *itself;
+ return (PyObject *)obj;
+}
+/* PyArg_Parse "O&" converter: copy the FInfo held by an FInfo object into
+ * *p_itself.  Returns 1 on success, 0 (with TypeError set) otherwise. */
+static int FInfo_Convert(PyObject *v, FInfo *p_itself)
+{
+ if (!FInfo_Check(v)) {
+  PyErr_SetString(PyExc_TypeError, "FInfo required");
+  return 0;
+ }
+ *p_itself = ((FInfoObject *)v)->ob_itself;
+ return 1;
+}
+
+/* tp_dealloc: the FInfo is stored by value, so only the object is freed. */
+static void FInfo_dealloc(FInfoObject *self)
+{
+ /* Cleanup of self->ob_itself goes here */
+ self->ob_type->tp_free((PyObject *)self);
+}
+
+/* FInfo exposes no methods, only the attributes in FInfo_getsetlist. */
+static PyMethodDef FInfo_methods[] = {
+ {NULL, NULL, 0}
+};
+
+/* Getter: fdType as a 4-char code via PyMac_BuildOSType. */
+static PyObject *FInfo_get_Type(FInfoObject *self, void *closure)
+{
+ return Py_BuildValue("O&", PyMac_BuildOSType, self->ob_itself.fdType);
+}
+
+/* Set fdType from a 4-char code; PyArg_Parse yields 1/0, "-1" gives the
+ * required 0/-1 setter result (unreachable "return 0;" removed). */
+static int FInfo_set_Type(FInfoObject *self, PyObject *v, void *closure)
+{
+ return PyArg_Parse(v, "O&", PyMac_GetOSType, &self->ob_itself.fdType)-1;
+}
+
+/* Getter: fdCreator as a 4-char code via PyMac_BuildOSType. */
+static PyObject *FInfo_get_Creator(FInfoObject *self, void *closure)
+{
+ return Py_BuildValue("O&", PyMac_BuildOSType, self->ob_itself.fdCreator);
+}
+
+/* Set fdCreator from a 4-char code; PyArg_Parse yields 1/0, "-1" gives the
+ * required 0/-1 setter result (unreachable "return 0;" removed). */
+static int FInfo_set_Creator(FInfoObject *self, PyObject *v, void *closure)
+{
+ return PyArg_Parse(v, "O&", PyMac_GetOSType, &self->ob_itself.fdCreator)-1;
+}
+
+/* Getter: fdFlags as an unsigned short ("H"). */
+static PyObject *FInfo_get_Flags(FInfoObject *self, void *closure)
+{
+ return Py_BuildValue("H", self->ob_itself.fdFlags);
+}
+
+/* Set fdFlags; PyArg_Parse yields 1/0, "-1" gives the required 0/-1
+ * setter result (unreachable "return 0;" removed). */
+static int FInfo_set_Flags(FInfoObject *self, PyObject *v, void *closure)
+{
+ return PyArg_Parse(v, "H", &self->ob_itself.fdFlags)-1;
+}
+
+/* Getter: fdLocation as an (x, y) tuple via PyMac_BuildPoint. */
+static PyObject *FInfo_get_Location(FInfoObject *self, void *closure)
+{
+ return Py_BuildValue("O&", PyMac_BuildPoint, self->ob_itself.fdLocation);
+}
+
+/* Set fdLocation from an (x, y) tuple; PyArg_Parse yields 1/0, "-1" gives
+ * the required 0/-1 setter result (unreachable "return 0;" removed). */
+static int FInfo_set_Location(FInfoObject *self, PyObject *v, void *closure)
+{
+ return PyArg_Parse(v, "O&", PyMac_GetPoint, &self->ob_itself.fdLocation)-1;
+}
+
+/* Getter: fdFldr as a signed short ("h"). */
+static PyObject *FInfo_get_Fldr(FInfoObject *self, void *closure)
+{
+ return Py_BuildValue("h", self->ob_itself.fdFldr);
+}
+
+/* Set fdFldr; PyArg_Parse yields 1/0, "-1" gives the required 0/-1
+ * setter result (unreachable "return 0;" removed). */
+static int FInfo_set_Fldr(FInfoObject *self, PyObject *v, void *closure)
+{
+ return PyArg_Parse(v, "h", &self->ob_itself.fdFldr)-1;
+}
+
+/* Attribute table mirroring the classic FInfo record fields. */
+static PyGetSetDef FInfo_getsetlist[] = {
+ {"Type", (getter)FInfo_get_Type, (setter)FInfo_set_Type, "4-char file type"},
+ {"Creator", (getter)FInfo_get_Creator, (setter)FInfo_set_Creator, "4-char file creator"},
+ {"Flags", (getter)FInfo_get_Flags, (setter)FInfo_set_Flags, "Finder flag bits"},
+ {"Location", (getter)FInfo_get_Location, (setter)FInfo_set_Location, "(x, y) location of the file's icon in its parent finder window"},
+ {"Fldr", (getter)FInfo_get_Fldr, (setter)FInfo_set_Fldr, "Original folder, for 'put away'"},
+ {NULL, NULL, NULL, NULL},
+};
+
+
+/* No custom compare/repr/hash: the corresponding type slots stay NULL. */
+#define FInfo_compare NULL
+
+#define FInfo_repr NULL
+
+#define FInfo_hash NULL
+/* __init__(itself=None): copy the state of another FInfo object.
+ *
+ * The original parsed "|O&" with FInfo_Convert into &itself, where itself
+ * was only an FInfo pointer: FInfo_Convert copies a whole FInfo through its
+ * output argument, so that wrote sizeof(FInfo) bytes over a pointer-sized
+ * stack slot and then dereferenced the clobbered pointer.  Parse the object
+ * first and convert directly into the wrapped record instead. */
+static int FInfo_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ PyObject *v = NULL;
+ static char *kw[] = {"itself", 0};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O", kw, &v))
+  return -1;
+ if (v != NULL && !FInfo_Convert(v, &((FInfoObject *)self)->ob_itself))
+  return -1;
+ return 0;
+}
+
+/* Standard allocation plumbing for FInfo instances. */
+#define FInfo_tp_alloc PyType_GenericAlloc
+
+/* tp_new: allocate the object and zero-fill the embedded record. */
+static PyObject *FInfo_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyObject *self;
+
+ if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
+ memset(&((FInfoObject *)self)->ob_itself, 0, sizeof(FInfo));
+ return self;
+}
+
+#define FInfo_tp_free PyObject_Del
+
+
+/* Type object for Carbon.File.FInfo; subclassing is allowed. */
+static PyTypeObject FInfo_Type = {
+ PyObject_HEAD_INIT(NULL)
+ 0, /*ob_size*/
+ "Carbon.File.FInfo", /*tp_name*/
+ sizeof(FInfoObject), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ /* methods */
+ (destructor) FInfo_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ (getattrfunc)0, /*tp_getattr*/
+ (setattrfunc)0, /*tp_setattr*/
+ (cmpfunc) FInfo_compare, /*tp_compare*/
+ (reprfunc) FInfo_repr, /*tp_repr*/
+ (PyNumberMethods *)0, /* tp_as_number */
+ (PySequenceMethods *)0, /* tp_as_sequence */
+ (PyMappingMethods *)0, /* tp_as_mapping */
+ (hashfunc) FInfo_hash, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ PyObject_GenericGetAttr, /*tp_getattro*/
+ PyObject_GenericSetAttr, /*tp_setattro */
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
+ 0, /*tp_doc*/
+ 0, /*tp_traverse*/
+ 0, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ FInfo_methods, /* tp_methods */
+ 0, /*tp_members*/
+ FInfo_getsetlist, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ FInfo_tp_init, /* tp_init */
+ FInfo_tp_alloc, /* tp_alloc */
+ FInfo_tp_new, /* tp_new */
+ FInfo_tp_free, /* tp_free */
+};
+
+/* --------------------- End object type FInfo ---------------------- */
+
+
+/* ----------------------- Object type Alias ------------------------ */
+
+/* Python wrapper around an AliasHandle, with an optional disposer hook. */
+static PyTypeObject Alias_Type;
+
+#define Alias_Check(x) ((x)->ob_type == &Alias_Type || PyObject_TypeCheck((x), &Alias_Type))
+
+typedef struct AliasObject {
+ PyObject_HEAD
+ AliasHandle ob_itself;
+ void (*ob_freeit)(AliasHandle ptr);
+} AliasObject;
+
+/* Wrap an existing AliasHandle (the handle itself, not a copy).  ob_freeit
+ * starts out NULL, so by default the handle is not disposed at dealloc.
+ * A NULL handle is reported as a Mac resNotFound error. */
+static PyObject *Alias_New(AliasHandle itself)
+{
+ AliasObject *obj;
+
+ if (itself == NULL)
+  return PyMac_Error(resNotFound);
+ obj = PyObject_NEW(AliasObject, &Alias_Type);
+ if (obj == NULL)
+  return NULL;
+ obj->ob_itself = itself;
+ obj->ob_freeit = NULL;
+ return (PyObject *)obj;
+}
+/* PyArg_Parse "O&" converter: fetch the AliasHandle out of an Alias object.
+ * Returns 1 on success, 0 (with TypeError set) otherwise. */
+static int Alias_Convert(PyObject *v, AliasHandle *p_itself)
+{
+ if (!Alias_Check(v)) {
+  PyErr_SetString(PyExc_TypeError, "Alias required");
+  return 0;
+ }
+ *p_itself = ((AliasObject *)v)->ob_itself;
+ return 1;
+}
+
+/* tp_dealloc: dispose the handle only if an owner installed ob_freeit. */
+static void Alias_dealloc(AliasObject *self)
+{
+ if (self->ob_freeit && self->ob_itself)
+ {
+ self->ob_freeit(self->ob_itself);
+ }
+ self->ob_itself = NULL;
+ self->ob_type->tp_free((PyObject *)self);
+}
+
+/* ResolveAlias(fromFile) -> (FSSpec target, Boolean wasChanged); fromFile
+ * may be None (the O& converter yields an optional FSSpec pointer). */
+static PyObject *Alias_ResolveAlias(AliasObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSSpec fromFile__buf__;
+ FSSpec *fromFile = &fromFile__buf__;
+ FSSpec target;
+ Boolean wasChanged;
+ if (!PyArg_ParseTuple(_args, "O&",
+ myPyMac_GetOptFSSpecPtr, &fromFile))
+ return NULL;
+ _err = ResolveAlias(fromFile,
+ _self->ob_itself,
+ &target,
+ &wasChanged);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&b",
+ FSSpec_New, &target,
+ wasChanged);
+ return _res;
+}
+
+/* GetAliasInfo(index) -> Str63 info string from the alias record. */
+static PyObject *Alias_GetAliasInfo(AliasObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ AliasInfoType index;
+ Str63 theString;
+ if (!PyArg_ParseTuple(_args, "h",
+ &index))
+ return NULL;
+ _err = GetAliasInfo(_self->ob_itself,
+ index,
+ theString);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ PyMac_BuildStr255, theString);
+ return _res;
+}
+
+/* ResolveAliasWithMountFlags(fromFile, mountFlags) ->
+ * (FSSpec target, Boolean wasChanged); fromFile may be None. */
+static PyObject *Alias_ResolveAliasWithMountFlags(AliasObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSSpec fromFile__buf__;
+ FSSpec *fromFile = &fromFile__buf__;
+ FSSpec target;
+ Boolean wasChanged;
+ unsigned long mountFlags;
+ if (!PyArg_ParseTuple(_args, "O&l",
+ myPyMac_GetOptFSSpecPtr, &fromFile,
+ &mountFlags))
+ return NULL;
+ _err = ResolveAliasWithMountFlags(fromFile,
+ _self->ob_itself,
+ &target,
+ &wasChanged,
+ mountFlags);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&b",
+ FSSpec_New, &target,
+ wasChanged);
+ return _res;
+}
+
+/* FollowFinderAlias(fromFile, logon) -> (FSSpec target, Boolean wasChanged);
+ * fromFile may be None. */
+static PyObject *Alias_FollowFinderAlias(AliasObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSSpec fromFile__buf__;
+ FSSpec *fromFile = &fromFile__buf__;
+ Boolean logon;
+ FSSpec target;
+ Boolean wasChanged;
+ if (!PyArg_ParseTuple(_args, "O&b",
+ myPyMac_GetOptFSSpecPtr, &fromFile,
+ &logon))
+ return NULL;
+ _err = FollowFinderAlias(fromFile,
+ _self->ob_itself,
+ logon,
+ &target,
+ &wasChanged);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&b",
+ FSSpec_New, &target,
+ wasChanged);
+ return _res;
+}
+
+/* FSResolveAliasWithMountFlags(fromFile, mountFlags) ->
+ * (FSRef target, Boolean wasChanged); fromFile may be None. */
+static PyObject *Alias_FSResolveAliasWithMountFlags(AliasObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSRef fromFile__buf__;
+ FSRef *fromFile = &fromFile__buf__;
+ FSRef target;
+ Boolean wasChanged;
+ unsigned long mountFlags;
+ if (!PyArg_ParseTuple(_args, "O&l",
+ myPyMac_GetOptFSRefPtr, &fromFile,
+ &mountFlags))
+ return NULL;
+ _err = FSResolveAliasWithMountFlags(fromFile,
+ _self->ob_itself,
+ &target,
+ &wasChanged,
+ mountFlags);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&b",
+ FSRef_New, &target,
+ wasChanged);
+ return _res;
+}
+
+/* FSResolveAlias(fromFile) -> (FSRef target, Boolean wasChanged);
+ * fromFile may be None. */
+static PyObject *Alias_FSResolveAlias(AliasObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSRef fromFile__buf__;
+ FSRef *fromFile = &fromFile__buf__;
+ FSRef target;
+ Boolean wasChanged;
+ if (!PyArg_ParseTuple(_args, "O&",
+ myPyMac_GetOptFSRefPtr, &fromFile))
+ return NULL;
+ _err = FSResolveAlias(fromFile,
+ _self->ob_itself,
+ &target,
+ &wasChanged);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&b",
+ FSRef_New, &target,
+ wasChanged);
+ return _res;
+}
+
+/* FSFollowFinderAlias(logon) ->
+ * (FSRef fromFile, FSRef target, Boolean wasChanged).
+ * NOTE(review): fromFile is handed to the Toolbox uninitialized and then
+ * returned to the caller -- presumably FSFollowFinderAlias fills it in;
+ * confirm against the Alias Manager documentation. */
+static PyObject *Alias_FSFollowFinderAlias(AliasObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSRef fromFile;
+ Boolean logon;
+ FSRef target;
+ Boolean wasChanged;
+ if (!PyArg_ParseTuple(_args, "b",
+ &logon))
+ return NULL;
+ _err = FSFollowFinderAlias(&fromFile,
+ _self->ob_itself,
+ logon,
+ &target,
+ &wasChanged);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&O&b",
+ FSRef_New, &fromFile,
+ FSRef_New, &target,
+ wasChanged);
+ return _res;
+}
+
+/* Method table for Alias objects; docstrings describe the Python signatures. */
+static PyMethodDef Alias_methods[] = {
+ {"ResolveAlias", (PyCFunction)Alias_ResolveAlias, 1,
+ PyDoc_STR("(FSSpec fromFile) -> (FSSpec target, Boolean wasChanged)")},
+ {"GetAliasInfo", (PyCFunction)Alias_GetAliasInfo, 1,
+ PyDoc_STR("(AliasInfoType index) -> (Str63 theString)")},
+ {"ResolveAliasWithMountFlags", (PyCFunction)Alias_ResolveAliasWithMountFlags, 1,
+ PyDoc_STR("(FSSpec fromFile, unsigned long mountFlags) -> (FSSpec target, Boolean wasChanged)")},
+ {"FollowFinderAlias", (PyCFunction)Alias_FollowFinderAlias, 1,
+ PyDoc_STR("(FSSpec fromFile, Boolean logon) -> (FSSpec target, Boolean wasChanged)")},
+ {"FSResolveAliasWithMountFlags", (PyCFunction)Alias_FSResolveAliasWithMountFlags, 1,
+ PyDoc_STR("(FSRef fromFile, unsigned long mountFlags) -> (FSRef target, Boolean wasChanged)")},
+ {"FSResolveAlias", (PyCFunction)Alias_FSResolveAlias, 1,
+ PyDoc_STR("(FSRef fromFile) -> (FSRef target, Boolean wasChanged)")},
+ {"FSFollowFinderAlias", (PyCFunction)Alias_FSFollowFinderAlias, 1,
+ PyDoc_STR("(Boolean logon) -> (FSRef fromFile, FSRef target, Boolean wasChanged)")},
+ {NULL, NULL, 0}
+};
+
+/* "data" attribute: the raw alias record as a byte string.  The handle is
+ * locked around the copy so the block cannot move while it is read. */
+static PyObject *Alias_get_data(AliasObject *self, void *closure)
+{
+ int size;
+ PyObject *rv;
+
+ size = GetHandleSize((Handle)self->ob_itself);
+ HLock((Handle)self->ob_itself);
+ rv = PyString_FromStringAndSize(*(Handle)self->ob_itself, size);
+ HUnlock((Handle)self->ob_itself);
+ return rv;
+
+}
+
+/* "data" is read-only: no setter slot. */
+#define Alias_set_data NULL
+
+static PyGetSetDef Alias_getsetlist[] = {
+ {"data", (getter)Alias_get_data, (setter)Alias_set_data, "Raw data of the alias object"},
+ {NULL, NULL, NULL, NULL},
+};
+
+
+/* No custom compare/repr/hash: the corresponding type slots stay NULL. */
+#define Alias_compare NULL
+
+#define Alias_repr NULL
+
+#define Alias_hash NULL
+/* __init__(itself=None, rawdata=None): wrap an existing AliasHandle, or
+ * build a fresh handle from raw alias-record bytes.  Exactly one of the
+ * two keyword arguments must be supplied. */
+static int Alias_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ AliasHandle itself = NULL;
+ char *rawdata = NULL;
+ int rawdatalen = 0;
+ Handle h;
+ static char *kw[] = {"itself", "rawdata", 0};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O&s#", kw, Alias_Convert, &itself, &rawdata, &rawdatalen))
+ return -1;
+ if (itself && rawdata)
+ {
+ PyErr_SetString(PyExc_TypeError, "Only one of itself or rawdata may be specified");
+ return -1;
+ }
+ if (!itself && !rawdata)
+ {
+ PyErr_SetString(PyExc_TypeError, "One of itself or rawdata must be specified");
+ return -1;
+ }
+ if (rawdata)
+ {
+ /* Copy the raw bytes into a freshly allocated, temporarily locked handle. */
+ if ((h = NewHandle(rawdatalen)) == NULL)
+ {
+ PyErr_NoMemory();
+ return -1;
+ }
+ HLock(h);
+ memcpy((char *)*h, rawdata, rawdatalen);
+ HUnlock(h);
+ ((AliasObject *)self)->ob_itself = (AliasHandle)h;
+ return 0;
+ }
+ /* Adopt the caller's handle as-is (no copy; ob_freeit stays unset here). */
+ ((AliasObject *)self)->ob_itself = itself;
+ return 0;
+}
+
+/* Standard allocation plumbing for Alias instances. */
+#define Alias_tp_alloc PyType_GenericAlloc
+
+/* tp_new: allocate the object with a NULL handle; tp_init fills it in. */
+static PyObject *Alias_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyObject *self;
+
+ if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
+ ((AliasObject *)self)->ob_itself = NULL;
+ return self;
+}
+
+#define Alias_tp_free PyObject_Del
+
+
+/* Type object for Carbon.File.Alias; subclassing is allowed. */
+static PyTypeObject Alias_Type = {
+ PyObject_HEAD_INIT(NULL)
+ 0, /*ob_size*/
+ "Carbon.File.Alias", /*tp_name*/
+ sizeof(AliasObject), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ /* methods */
+ (destructor) Alias_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ (getattrfunc)0, /*tp_getattr*/
+ (setattrfunc)0, /*tp_setattr*/
+ (cmpfunc) Alias_compare, /*tp_compare*/
+ (reprfunc) Alias_repr, /*tp_repr*/
+ (PyNumberMethods *)0, /* tp_as_number */
+ (PySequenceMethods *)0, /* tp_as_sequence */
+ (PyMappingMethods *)0, /* tp_as_mapping */
+ (hashfunc) Alias_hash, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ PyObject_GenericGetAttr, /*tp_getattro*/
+ PyObject_GenericSetAttr, /*tp_setattro */
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
+ 0, /*tp_doc*/
+ 0, /*tp_traverse*/
+ 0, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ Alias_methods, /* tp_methods */
+ 0, /*tp_members*/
+ Alias_getsetlist, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ Alias_tp_init, /* tp_init */
+ Alias_tp_alloc, /* tp_alloc */
+ Alias_tp_new, /* tp_new */
+ Alias_tp_free, /* tp_free */
+};
+
+/* --------------------- End object type Alias ---------------------- */
+
+
+/* ----------------------- Object type FSSpec ----------------------- */
+
+/* Python wrapper around an FSSpec record, stored by value. */
+static PyTypeObject FSSpec_Type;
+
+#define FSSpec_Check(x) ((x)->ob_type == &FSSpec_Type || PyObject_TypeCheck((x), &FSSpec_Type))
+
+typedef struct FSSpecObject {
+ PyObject_HEAD
+ FSSpec ob_itself;
+} FSSpecObject;
+
+/* Box a copy of *itself in a new FSSpecObject.
+ * A NULL record is reported as a Mac resNotFound error. */
+static PyObject *FSSpec_New(FSSpec *itself)
+{
+ FSSpecObject *obj;
+
+ if (itself == NULL)
+  return PyMac_Error(resNotFound);
+ obj = PyObject_NEW(FSSpecObject, &FSSpec_Type);
+ if (obj == NULL)
+  return NULL;
+ obj->ob_itself = *itself;
+ return (PyObject *)obj;
+}
+
+/* tp_dealloc: the FSSpec is stored by value; nothing extra to release. */
+static void FSSpec_dealloc(FSSpecObject *self)
+{
+ /* Cleanup of self->ob_itself goes here */
+ self->ob_type->tp_free((PyObject *)self);
+}
+
+/* FSpOpenDF(permission) -> refNum: open this file's data fork. */
+static PyObject *FSSpec_FSpOpenDF(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ SInt8 permission;
+ short refNum;
+ if (!PyArg_ParseTuple(_args, "b",
+ &permission))
+ return NULL;
+ _err = FSpOpenDF(&_self->ob_itself,
+ permission,
+ &refNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("h",
+ refNum);
+ return _res;
+}
+
+/* FSpOpenRF(permission) -> refNum: open this file's resource fork. */
+static PyObject *FSSpec_FSpOpenRF(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ SInt8 permission;
+ short refNum;
+ if (!PyArg_ParseTuple(_args, "b",
+ &permission))
+ return NULL;
+ _err = FSpOpenRF(&_self->ob_itself,
+ permission,
+ &refNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("h",
+ refNum);
+ return _res;
+}
+
+/* FSpCreate(creator, fileType, scriptTag) -> None: create the file. */
+static PyObject *FSSpec_FSpCreate(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ OSType creator;
+ OSType fileType;
+ ScriptCode scriptTag;
+ if (!PyArg_ParseTuple(_args, "O&O&h",
+ PyMac_GetOSType, &creator,
+ PyMac_GetOSType, &fileType,
+ &scriptTag))
+ return NULL;
+ _err = FSpCreate(&_self->ob_itself,
+ creator,
+ fileType,
+ scriptTag);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpDirCreate(scriptTag) -> createdDirID: create a directory here. */
+static PyObject *FSSpec_FSpDirCreate(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ ScriptCode scriptTag;
+ long createdDirID;
+ if (!PyArg_ParseTuple(_args, "h",
+ &scriptTag))
+ return NULL;
+ _err = FSpDirCreate(&_self->ob_itself,
+ scriptTag,
+ &createdDirID);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("l",
+ createdDirID);
+ return _res;
+}
+
+/* FSpDelete() -> None: delete the file or (empty) directory. */
+static PyObject *FSSpec_FSpDelete(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSpDelete(&_self->ob_itself);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpGetFInfo() -> FInfo: read the file's Finder info. */
+static PyObject *FSSpec_FSpGetFInfo(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FInfo fndrInfo;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSpGetFInfo(&_self->ob_itself,
+ &fndrInfo);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ FInfo_New, &fndrInfo);
+ return _res;
+}
+
+/* FSpSetFInfo(fndrInfo) -> None: write the file's Finder info. */
+static PyObject *FSSpec_FSpSetFInfo(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FInfo fndrInfo;
+ if (!PyArg_ParseTuple(_args, "O&",
+ FInfo_Convert, &fndrInfo))
+ return NULL;
+ _err = FSpSetFInfo(&_self->ob_itself,
+ &fndrInfo);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpSetFLock() -> None: set the file's lock bit. */
+static PyObject *FSSpec_FSpSetFLock(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSpSetFLock(&_self->ob_itself);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpRstFLock() -> None: clear the file's lock bit. */
+static PyObject *FSSpec_FSpRstFLock(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSpRstFLock(&_self->ob_itself);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpRename(newName) -> None: rename the file in place. */
+static PyObject *FSSpec_FSpRename(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ Str255 newName;
+ if (!PyArg_ParseTuple(_args, "O&",
+ PyMac_GetStr255, newName))
+ return NULL;
+ _err = FSpRename(&_self->ob_itself,
+ newName);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpCatMove(dest) -> None: move the file to another location. */
+static PyObject *FSSpec_FSpCatMove(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSSpec dest;
+ if (!PyArg_ParseTuple(_args, "O&",
+ FSSpec_Convert, &dest))
+ return NULL;
+ _err = FSpCatMove(&_self->ob_itself,
+ &dest);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpExchangeFiles(dest) -> None: swap this file's contents with dest. */
+static PyObject *FSSpec_FSpExchangeFiles(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSSpec dest;
+ if (!PyArg_ParseTuple(_args, "O&",
+ FSSpec_Convert, &dest))
+ return NULL;
+ _err = FSpExchangeFiles(&_self->ob_itself,
+ &dest);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSpMakeFSRef() -> FSRef: convert this FSSpec to an FSRef. */
+static PyObject *FSSpec_FSpMakeFSRef(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSRef newRef;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSpMakeFSRef(&_self->ob_itself,
+ &newRef);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ FSRef_New, &newRef);
+ return _res;
+}
+
+/* NewAliasMinimal() -> Alias: build a minimal alias record for this file. */
+static PyObject *FSSpec_NewAliasMinimal(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ AliasHandle alias;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = NewAliasMinimal(&_self->ob_itself,
+ &alias);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ Alias_New, alias);
+ return _res;
+}
+
+/* IsAliasFile() -> (aliasFileFlag, folderFlag) as two booleans. */
+static PyObject *FSSpec_IsAliasFile(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ Boolean aliasFileFlag;
+ Boolean folderFlag;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = IsAliasFile(&_self->ob_itself,
+ &aliasFileFlag,
+ &folderFlag);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("bb",
+ aliasFileFlag,
+ folderFlag);
+ return _res;
+}
+
+/* as_pathname() -> string: full pathname via PyMac_GetFullPathname,
+ * using a fixed 1024-byte buffer. */
+static PyObject *FSSpec_as_pathname(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+
+ char strbuf[1024];
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ err = PyMac_GetFullPathname(&_self->ob_itself, strbuf, sizeof(strbuf));
+ if ( err ) {
+ PyMac_Error(err);
+ return NULL;
+ }
+ _res = PyString_FromString(strbuf);
+ return _res;
+
+}
+
+/* as_tuple() -> (vRefNum, parID, name); name is a Pascal string whose
+ * first byte is its length, hence the name[1]/name[0] slicing. */
+static PyObject *FSSpec_as_tuple(FSSpecObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _res = Py_BuildValue("(iis#)", _self->ob_itself.vRefNum, _self->ob_itself.parID,
+ &_self->ob_itself.name[1], _self->ob_itself.name[0]);
+ return _res;
+
+}
+
+/* Method table for FSSpec objects; docstrings describe the Python signatures. */
+static PyMethodDef FSSpec_methods[] = {
+ {"FSpOpenDF", (PyCFunction)FSSpec_FSpOpenDF, 1,
+ PyDoc_STR("(SInt8 permission) -> (short refNum)")},
+ {"FSpOpenRF", (PyCFunction)FSSpec_FSpOpenRF, 1,
+ PyDoc_STR("(SInt8 permission) -> (short refNum)")},
+ {"FSpCreate", (PyCFunction)FSSpec_FSpCreate, 1,
+ PyDoc_STR("(OSType creator, OSType fileType, ScriptCode scriptTag) -> None")},
+ {"FSpDirCreate", (PyCFunction)FSSpec_FSpDirCreate, 1,
+ PyDoc_STR("(ScriptCode scriptTag) -> (long createdDirID)")},
+ {"FSpDelete", (PyCFunction)FSSpec_FSpDelete, 1,
+ PyDoc_STR("() -> None")},
+ {"FSpGetFInfo", (PyCFunction)FSSpec_FSpGetFInfo, 1,
+ PyDoc_STR("() -> (FInfo fndrInfo)")},
+ {"FSpSetFInfo", (PyCFunction)FSSpec_FSpSetFInfo, 1,
+ PyDoc_STR("(FInfo fndrInfo) -> None")},
+ {"FSpSetFLock", (PyCFunction)FSSpec_FSpSetFLock, 1,
+ PyDoc_STR("() -> None")},
+ {"FSpRstFLock", (PyCFunction)FSSpec_FSpRstFLock, 1,
+ PyDoc_STR("() -> None")},
+ {"FSpRename", (PyCFunction)FSSpec_FSpRename, 1,
+ PyDoc_STR("(Str255 newName) -> None")},
+ {"FSpCatMove", (PyCFunction)FSSpec_FSpCatMove, 1,
+ PyDoc_STR("(FSSpec dest) -> None")},
+ {"FSpExchangeFiles", (PyCFunction)FSSpec_FSpExchangeFiles, 1,
+ PyDoc_STR("(FSSpec dest) -> None")},
+ {"FSpMakeFSRef", (PyCFunction)FSSpec_FSpMakeFSRef, 1,
+ PyDoc_STR("() -> (FSRef newRef)")},
+ {"NewAliasMinimal", (PyCFunction)FSSpec_NewAliasMinimal, 1,
+ PyDoc_STR("() -> (AliasHandle alias)")},
+ {"IsAliasFile", (PyCFunction)FSSpec_IsAliasFile, 1,
+ PyDoc_STR("() -> (Boolean aliasFileFlag, Boolean folderFlag)")},
+ {"as_pathname", (PyCFunction)FSSpec_as_pathname, 1,
+ PyDoc_STR("() -> string")},
+ {"as_tuple", (PyCFunction)FSSpec_as_tuple, 1,
+ PyDoc_STR("() -> (vRefNum, dirID, name)")},
+ {NULL, NULL, 0}
+};
+
+/* "data" attribute: the FSSpec record's raw bytes as a string. */
+static PyObject *FSSpec_get_data(FSSpecObject *self, void *closure)
+{
+ return PyString_FromStringAndSize((char *)&self->ob_itself, sizeof(self->ob_itself));
+}
+
+/* "data" is read-only: no setter slot. */
+#define FSSpec_set_data NULL
+
+static PyGetSetDef FSSpec_getsetlist[] = {
+ {"data", (getter)FSSpec_get_data, (setter)FSSpec_set_data, "Raw data of the FSSpec object"},
+ {NULL, NULL, NULL, NULL},
+};
+
+
+#define FSSpec_compare NULL
+
+/* repr: e.g. Carbon.File.FSSpec((vRefNum, parID, 'name')); the Pascal
+ * string's length byte bounds the %.*s format. */
+static PyObject * FSSpec_repr(FSSpecObject *self)
+{
+ char buf[512];
+ PyOS_snprintf(buf, sizeof(buf), "%s((%d, %ld, '%.*s'))",
+ self->ob_type->tp_name,
+ self->ob_itself.vRefNum,
+ self->ob_itself.parID,
+ self->ob_itself.name[0], self->ob_itself.name+1);
+ return PyString_FromString(buf);
+}
+
+#define FSSpec_hash NULL
+/* __init__(itself=None, rawdata=None): build from anything PyMac_GetFSSpec
+ * accepts, or from exactly sizeof(FSSpec) raw bytes.  Exactly one of the
+ * two keyword arguments must be supplied. */
+static int FSSpec_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ PyObject *v = NULL;
+ char *rawdata = NULL;
+ int rawdatalen = 0;
+ static char *kw[] = {"itself", "rawdata", 0};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|Os#", kw, &v, &rawdata, &rawdatalen))
+ return -1;
+ if (v && rawdata)
+ {
+ PyErr_SetString(PyExc_TypeError, "Only one of itself or rawdata may be specified");
+ return -1;
+ }
+ if (!v && !rawdata)
+ {
+ PyErr_SetString(PyExc_TypeError, "One of itself or rawdata must be specified");
+ return -1;
+ }
+ if (rawdata)
+ {
+ if (rawdatalen != sizeof(FSSpec))
+ {
+ PyErr_SetString(PyExc_TypeError, "FSSpec rawdata incorrect size");
+ return -1;
+ }
+ memcpy(&((FSSpecObject *)self)->ob_itself, rawdata, rawdatalen);
+ return 0;
+ }
+ if (PyMac_GetFSSpec(v, &((FSSpecObject *)self)->ob_itself)) return 0;
+ return -1;
+}
+
+/* Standard allocation plumbing for FSSpec instances. */
+#define FSSpec_tp_alloc PyType_GenericAlloc
+
+/* tp_new: allocate the object and zero-fill the embedded record. */
+static PyObject *FSSpec_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyObject *self;
+
+ if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
+ memset(&((FSSpecObject *)self)->ob_itself, 0, sizeof(FSSpec));
+ return self;
+}
+
+#define FSSpec_tp_free PyObject_Del
+
+
+/* Type object for Carbon.File.FSSpec; subclassing is allowed. */
+static PyTypeObject FSSpec_Type = {
+ PyObject_HEAD_INIT(NULL)
+ 0, /*ob_size*/
+ "Carbon.File.FSSpec", /*tp_name*/
+ sizeof(FSSpecObject), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ /* methods */
+ (destructor) FSSpec_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ (getattrfunc)0, /*tp_getattr*/
+ (setattrfunc)0, /*tp_setattr*/
+ (cmpfunc) FSSpec_compare, /*tp_compare*/
+ (reprfunc) FSSpec_repr, /*tp_repr*/
+ (PyNumberMethods *)0, /* tp_as_number */
+ (PySequenceMethods *)0, /* tp_as_sequence */
+ (PyMappingMethods *)0, /* tp_as_mapping */
+ (hashfunc) FSSpec_hash, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ PyObject_GenericGetAttr, /*tp_getattro*/
+ PyObject_GenericSetAttr, /*tp_setattro */
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
+ 0, /*tp_doc*/
+ 0, /*tp_traverse*/
+ 0, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ FSSpec_methods, /* tp_methods */
+ 0, /*tp_members*/
+ FSSpec_getsetlist, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ FSSpec_tp_init, /* tp_init */
+ FSSpec_tp_alloc, /* tp_alloc */
+ FSSpec_tp_new, /* tp_new */
+ FSSpec_tp_free, /* tp_free */
+};
+
+/* --------------------- End object type FSSpec --------------------- */
+
+
+/* ----------------------- Object type FSRef ------------------------ */
+
+/* Python wrapper around an FSRef, stored by value. */
+static PyTypeObject FSRef_Type;
+
+#define FSRef_Check(x) ((x)->ob_type == &FSRef_Type || PyObject_TypeCheck((x), &FSRef_Type))
+
+typedef struct FSRefObject {
+ PyObject_HEAD
+ FSRef ob_itself;
+} FSRefObject;
+
+/* Wrap a C FSRef in a new Carbon.File.FSRef object (copies the record).
+ * A NULL pointer raises MacOS.Error(resNotFound). */
+static PyObject *FSRef_New(FSRef *itself)
+{
+ FSRefObject *obj;
+
+ if (itself == NULL)
+ return PyMac_Error(resNotFound);
+ obj = PyObject_NEW(FSRefObject, &FSRef_Type);
+ if (obj == NULL)
+ return NULL;
+ obj->ob_itself = *itself;
+ return (PyObject *)obj;
+}
+
+/* Deallocator: an FSRef holds no Python references or external resources,
+ * so simply hand the memory back to the type's tp_free. */
+static void FSRef_dealloc(FSRefObject *self)
+{
+ freefunc release = self->ob_type->tp_free;
+
+ release((PyObject *)self);
+}
+
+/* FSMakeFSRefUnicode(name, textEncodingHint) -> FSRef
+ * Resolve the Unicode name relative to this FSRef and return a new FSRef. */
+static PyObject *FSRef_FSMakeFSRefUnicode(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ UniChar *nameLength__in__;
+ UniCharCount nameLength__len__;
+ int nameLength__in_len__;
+ TextEncoding textEncodingHint;
+ FSRef newRef;
+ if (!PyArg_ParseTuple(_args, "u#l",
+ &nameLength__in__, &nameLength__in_len__,
+ &textEncodingHint))
+ return NULL;
+ nameLength__len__ = nameLength__in_len__;
+ _err = FSMakeFSRefUnicode(&_self->ob_itself,
+ nameLength__len__, nameLength__in__,
+ textEncodingHint,
+ &newRef);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ FSRef_New, &newRef);
+ return _res;
+}
+
+/* FSCompareFSRefs(ref2) -> None
+ * Raises MacOS.Error if the toolbox call reports the refs differ. */
+static PyObject *FSRef_FSCompareFSRefs(FSRefObject *_self, PyObject *_args)
+{
+ FSRef other;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "O&", FSRef_Convert, &other))
+ return NULL;
+ err = FSCompareFSRefs(&_self->ob_itself, &other);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* FSCreateFileUnicode(name, whichInfo, catalogInfo) -> (FSRef, FSSpec)
+ * Create a file with the given Unicode name inside this (directory) FSRef. */
+static PyObject *FSRef_FSCreateFileUnicode(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ UniChar *nameLength__in__;
+ UniCharCount nameLength__len__;
+ int nameLength__in_len__;
+ FSCatalogInfoBitmap whichInfo;
+ FSCatalogInfo catalogInfo;
+ FSRef newRef;
+ FSSpec newSpec;
+ if (!PyArg_ParseTuple(_args, "u#lO&",
+ &nameLength__in__, &nameLength__in_len__,
+ &whichInfo,
+ FSCatalogInfo_Convert, &catalogInfo))
+ return NULL;
+ nameLength__len__ = nameLength__in_len__;
+ _err = FSCreateFileUnicode(&_self->ob_itself,
+ nameLength__len__, nameLength__in__,
+ whichInfo,
+ &catalogInfo,
+ &newRef,
+ &newSpec);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&O&",
+ FSRef_New, &newRef,
+ FSSpec_New, &newSpec);
+ return _res;
+}
+
+/* FSCreateDirectoryUnicode(name, whichInfo, catalogInfo)
+ *     -> (FSRef, FSSpec, newDirID)
+ * Create a directory with the given Unicode name inside this FSRef. */
+static PyObject *FSRef_FSCreateDirectoryUnicode(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ UniChar *nameLength__in__;
+ UniCharCount nameLength__len__;
+ int nameLength__in_len__;
+ FSCatalogInfoBitmap whichInfo;
+ FSCatalogInfo catalogInfo;
+ FSRef newRef;
+ FSSpec newSpec;
+ UInt32 newDirID;
+ if (!PyArg_ParseTuple(_args, "u#lO&",
+ &nameLength__in__, &nameLength__in_len__,
+ &whichInfo,
+ FSCatalogInfo_Convert, &catalogInfo))
+ return NULL;
+ nameLength__len__ = nameLength__in_len__;
+ _err = FSCreateDirectoryUnicode(&_self->ob_itself,
+ nameLength__len__, nameLength__in__,
+ whichInfo,
+ &catalogInfo,
+ &newRef,
+ &newSpec,
+ &newDirID);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&O&l",
+ FSRef_New, &newRef,
+ FSSpec_New, &newSpec,
+ newDirID);
+ return _res;
+}
+
+/* FSDeleteObject() -> None.  Delete the object this FSRef refers to. */
+static PyObject *FSRef_FSDeleteObject(FSRefObject *_self, PyObject *_args)
+{
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ err = FSDeleteObject(&_self->ob_itself);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* FSMoveObject(destDirectory) -> FSRef
+ * Move this object into destDirectory; returns the object's new FSRef. */
+static PyObject *FSRef_FSMoveObject(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSRef destDirectory;
+ FSRef newRef;
+ if (!PyArg_ParseTuple(_args, "O&",
+ FSRef_Convert, &destDirectory))
+ return NULL;
+ _err = FSMoveObject(&_self->ob_itself,
+ &destDirectory,
+ &newRef);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ FSRef_New, &newRef);
+ return _res;
+}
+
+/* FSExchangeObjects(destRef) -> None.  Swap the contents of the two files. */
+static PyObject *FSRef_FSExchangeObjects(FSRefObject *_self, PyObject *_args)
+{
+ FSRef partner;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "O&", FSRef_Convert, &partner))
+ return NULL;
+ err = FSExchangeObjects(&_self->ob_itself, &partner);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* FSRenameUnicode(name, textEncodingHint) -> FSRef
+ * Rename this object; returns its new FSRef. */
+static PyObject *FSRef_FSRenameUnicode(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ UniChar *nameLength__in__;
+ UniCharCount nameLength__len__;
+ int nameLength__in_len__;
+ TextEncoding textEncodingHint;
+ FSRef newRef;
+ if (!PyArg_ParseTuple(_args, "u#l",
+ &nameLength__in__, &nameLength__in_len__,
+ &textEncodingHint))
+ return NULL;
+ nameLength__len__ = nameLength__in_len__;
+ _err = FSRenameUnicode(&_self->ob_itself,
+ nameLength__len__, nameLength__in__,
+ textEncodingHint,
+ &newRef);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ FSRef_New, &newRef);
+ return _res;
+}
+
+/* FSGetCatalogInfo(whichInfo)
+ *     -> (FSCatalogInfo, HFSUniStr255 name, FSSpec, FSRef parent)
+ * whichInfo is a bitmap selecting which catalog fields are filled in. */
+static PyObject *FSRef_FSGetCatalogInfo(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSCatalogInfoBitmap whichInfo;
+ FSCatalogInfo catalogInfo;
+ HFSUniStr255 outName;
+ FSSpec fsSpec;
+ FSRef parentRef;
+ if (!PyArg_ParseTuple(_args, "l",
+ &whichInfo))
+ return NULL;
+ _err = FSGetCatalogInfo(&_self->ob_itself,
+ whichInfo,
+ &catalogInfo,
+ &outName,
+ &fsSpec,
+ &parentRef);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&O&O&O&",
+ FSCatalogInfo_New, &catalogInfo,
+ PyMac_BuildHFSUniStr255, &outName,
+ FSSpec_New, &fsSpec,
+ FSRef_New, &parentRef);
+ return _res;
+}
+
+/* FSSetCatalogInfo(whichInfo, catalogInfo) -> None
+ * Set the catalog fields selected by the whichInfo bitmap. */
+static PyObject *FSRef_FSSetCatalogInfo(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSCatalogInfoBitmap whichInfo;
+ FSCatalogInfo catalogInfo;
+ if (!PyArg_ParseTuple(_args, "lO&",
+ &whichInfo,
+ FSCatalogInfo_Convert, &catalogInfo))
+ return NULL;
+ _err = FSSetCatalogInfo(&_self->ob_itself,
+ whichInfo,
+ &catalogInfo);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSCreateFork(forkName) -> None.  forkName is a Unicode string. */
+static PyObject *FSRef_FSCreateFork(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ UniChar *forkNameLength__in__;
+ UniCharCount forkNameLength__len__;
+ int forkNameLength__in_len__;
+ if (!PyArg_ParseTuple(_args, "u#",
+ &forkNameLength__in__, &forkNameLength__in_len__))
+ return NULL;
+ forkNameLength__len__ = forkNameLength__in_len__;
+ _err = FSCreateFork(&_self->ob_itself,
+ forkNameLength__len__, forkNameLength__in__);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSDeleteFork(forkName) -> None.  forkName is a Unicode string. */
+static PyObject *FSRef_FSDeleteFork(FSRefObject *_self, PyObject *_args)
+{
+ UniChar *forkName;
+ int forkNameChars;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "u#", &forkName, &forkNameChars))
+ return NULL;
+ err = FSDeleteFork(&_self->ob_itself,
+ (UniCharCount)forkNameChars, forkName);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* FSOpenFork(forkName, permissions) -> forkRefNum (SInt16)
+ * Open the named fork of this file with the given permission byte. */
+static PyObject *FSRef_FSOpenFork(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ UniChar *forkNameLength__in__;
+ UniCharCount forkNameLength__len__;
+ int forkNameLength__in_len__;
+ SInt8 permissions;
+ SInt16 forkRefNum;
+ if (!PyArg_ParseTuple(_args, "u#b",
+ &forkNameLength__in__, &forkNameLength__in_len__,
+ &permissions))
+ return NULL;
+ forkNameLength__len__ = forkNameLength__in_len__;
+ _err = FSOpenFork(&_self->ob_itself,
+ forkNameLength__len__, forkNameLength__in__,
+ permissions,
+ &forkRefNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("h",
+ forkRefNum);
+ return _res;
+}
+
+#if TARGET_API_MAC_OSX
+
+/* FNNotify(message, flags) -> None
+ * Post an FNNotify notification for this FSRef (OSX-only API). */
+static PyObject *FSRef_FNNotify(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSStatus _err;
+ FNMessage message;
+ OptionBits flags;
+ if (!PyArg_ParseTuple(_args, "ll",
+ &message,
+ &flags))
+ return NULL;
+ _err = FNNotify(&_self->ob_itself,
+ message,
+ flags);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+#endif
+
+/* FSNewAliasMinimal() -> Alias
+ * Create a minimal alias record for this FSRef. */
+static PyObject *FSRef_FSNewAliasMinimal(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ AliasHandle inAlias;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSNewAliasMinimal(&_self->ob_itself,
+ &inAlias);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ Alias_New, inAlias);
+ return _res;
+}
+
+/* FSIsAliasFile() -> (aliasFileFlag, folderFlag), both Booleans. */
+static PyObject *FSRef_FSIsAliasFile(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ Boolean aliasFileFlag;
+ Boolean folderFlag;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSIsAliasFile(&_self->ob_itself,
+ &aliasFileFlag,
+ &folderFlag);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("bb",
+ aliasFileFlag,
+ folderFlag);
+ return _res;
+}
+
+/* FSRefMakePath() -> string
+ * Return the POSIX pathname for this FSRef (at most MAXPATHNAME bytes). */
+static PyObject *FSRef_FSRefMakePath(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+
+ OSStatus _err;
+#define MAXPATHNAME 1024
+ UInt8 path[MAXPATHNAME];
+ UInt32 maxPathSize = MAXPATHNAME;
+
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSRefMakePath(&_self->ob_itself,
+ path,
+ maxPathSize);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("s", path);
+ return _res;
+
+}
+
+/* as_pathname() -> string
+ * On OSX this simply delegates to FSRefMakePath; on classic MacOS it
+ * converts self to an FSSpec and builds the full pathname from that. */
+static PyObject *FSRef_as_pathname(FSRefObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+
+#if TARGET_API_MAC_OSX
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _res = FSRef_FSRefMakePath(_self, _args);
+#else
+ char strbuf[1024];
+ OSErr err;
+ FSSpec fss;
+
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ if ( !PyMac_GetFSSpec((PyObject *)_self, &fss))
+ return NULL;
+ err = PyMac_GetFullPathname(&fss, strbuf, sizeof(strbuf));
+ if ( err ) {
+ PyMac_Error(err);
+ return NULL;
+ }
+ _res = PyString_FromString(strbuf);
+#endif
+ return _res;
+
+}
+
+/* Method table for Carbon.File.FSRef (flag 1 == METH_VARARGS). */
+static PyMethodDef FSRef_methods[] = {
+ {"FSMakeFSRefUnicode", (PyCFunction)FSRef_FSMakeFSRefUnicode, 1,
+ PyDoc_STR("(Buffer nameLength, TextEncoding textEncodingHint) -> (FSRef newRef)")},
+ {"FSCompareFSRefs", (PyCFunction)FSRef_FSCompareFSRefs, 1,
+ PyDoc_STR("(FSRef ref2) -> None")},
+ {"FSCreateFileUnicode", (PyCFunction)FSRef_FSCreateFileUnicode, 1,
+ PyDoc_STR("(Buffer nameLength, FSCatalogInfoBitmap whichInfo, FSCatalogInfo catalogInfo) -> (FSRef newRef, FSSpec newSpec)")},
+ {"FSCreateDirectoryUnicode", (PyCFunction)FSRef_FSCreateDirectoryUnicode, 1,
+ PyDoc_STR("(Buffer nameLength, FSCatalogInfoBitmap whichInfo, FSCatalogInfo catalogInfo) -> (FSRef newRef, FSSpec newSpec, UInt32 newDirID)")},
+ {"FSDeleteObject", (PyCFunction)FSRef_FSDeleteObject, 1,
+ PyDoc_STR("() -> None")},
+ {"FSMoveObject", (PyCFunction)FSRef_FSMoveObject, 1,
+ PyDoc_STR("(FSRef destDirectory) -> (FSRef newRef)")},
+ {"FSExchangeObjects", (PyCFunction)FSRef_FSExchangeObjects, 1,
+ PyDoc_STR("(FSRef destRef) -> None")},
+ {"FSRenameUnicode", (PyCFunction)FSRef_FSRenameUnicode, 1,
+ PyDoc_STR("(Buffer nameLength, TextEncoding textEncodingHint) -> (FSRef newRef)")},
+ {"FSGetCatalogInfo", (PyCFunction)FSRef_FSGetCatalogInfo, 1,
+ PyDoc_STR("(FSCatalogInfoBitmap whichInfo) -> (FSCatalogInfo catalogInfo, HFSUniStr255 outName, FSSpec fsSpec, FSRef parentRef)")},
+ {"FSSetCatalogInfo", (PyCFunction)FSRef_FSSetCatalogInfo, 1,
+ PyDoc_STR("(FSCatalogInfoBitmap whichInfo, FSCatalogInfo catalogInfo) -> None")},
+ {"FSCreateFork", (PyCFunction)FSRef_FSCreateFork, 1,
+ PyDoc_STR("(Buffer forkNameLength) -> None")},
+ {"FSDeleteFork", (PyCFunction)FSRef_FSDeleteFork, 1,
+ PyDoc_STR("(Buffer forkNameLength) -> None")},
+ {"FSOpenFork", (PyCFunction)FSRef_FSOpenFork, 1,
+ PyDoc_STR("(Buffer forkNameLength, SInt8 permissions) -> (SInt16 forkRefNum)")},
+
+#if TARGET_API_MAC_OSX
+ {"FNNotify", (PyCFunction)FSRef_FNNotify, 1,
+ PyDoc_STR("(FNMessage message, OptionBits flags) -> None")},
+#endif
+ {"FSNewAliasMinimal", (PyCFunction)FSRef_FSNewAliasMinimal, 1,
+ PyDoc_STR("() -> (AliasHandle inAlias)")},
+ {"FSIsAliasFile", (PyCFunction)FSRef_FSIsAliasFile, 1,
+ PyDoc_STR("() -> (Boolean aliasFileFlag, Boolean folderFlag)")},
+ {"FSRefMakePath", (PyCFunction)FSRef_FSRefMakePath, 1,
+ PyDoc_STR("() -> string")},
+ {"as_pathname", (PyCFunction)FSRef_as_pathname, 1,
+ PyDoc_STR("() -> string")},
+ {NULL, NULL, 0}
+};
+
+/* Getter for the read-only "data" attribute: the raw bytes of the FSRef
+ * record, suitable for re-creating the object via FSRef(rawdata=...). */
+static PyObject *FSRef_get_data(FSRefObject *self, void *closure)
+{
+ return PyString_FromStringAndSize((char *)&self->ob_itself, sizeof(self->ob_itself));
+}
+
+#define FSRef_set_data NULL
+
+static PyGetSetDef FSRef_getsetlist[] = {
+ {"data", (getter)FSRef_get_data, (setter)FSRef_set_data, "Raw data of the FSRef object"},
+ {NULL, NULL, NULL, NULL},
+};
+
+
+/* FSRef supports no comparison, repr or hashing; leave the slots NULL. */
+#define FSRef_compare NULL
+
+#define FSRef_repr NULL
+
+#define FSRef_hash NULL
+/* tp_init: FSRef(itself=<object convertible to FSRef>) or
+ * FSRef(rawdata=<string of exactly sizeof(FSRef) bytes>).
+ * Exactly one of the two keyword arguments must be given. */
+static int FSRef_tp_init(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ PyObject *v = NULL;
+ char *rawdata = NULL;
+ int rawdatalen = 0;
+ static char *kw[] = {"itself", "rawdata", 0};
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "|Os#", kw, &v, &rawdata, &rawdatalen))
+ return -1;
+ if (v && rawdata)
+ {
+ PyErr_SetString(PyExc_TypeError, "Only one of itself or rawdata may be specified");
+ return -1;
+ }
+ if (!v && !rawdata)
+ {
+ PyErr_SetString(PyExc_TypeError, "One of itself or rawdata must be specified");
+ return -1;
+ }
+ if (rawdata)
+ {
+ if (rawdatalen != sizeof(FSRef))
+ {
+ PyErr_SetString(PyExc_TypeError, "FSRef rawdata incorrect size");
+ return -1;
+ }
+ memcpy(&((FSRefObject *)self)->ob_itself, rawdata, rawdatalen);
+ return 0;
+ }
+ if (PyMac_GetFSRef(v, &((FSRefObject *)self)->ob_itself)) return 0;
+ return -1;
+}
+
+#define FSRef_tp_alloc PyType_GenericAlloc
+
+/* tp_new for Carbon.File.FSRef: allocate the instance and zero the
+ * embedded FSRef record so tp_init always starts from a known state. */
+static PyObject *FSRef_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyObject *self;
+
+ if ((self = type->tp_alloc(type, 0)) == NULL) return NULL;
+ memset(&((FSRefObject *)self)->ob_itself, 0, sizeof(FSRef));
+ return self;
+}
+
+#define FSRef_tp_free PyObject_Del
+
+
+/* Type object for Carbon.File.FSRef.  Mirrors FSSpec_Type above;
+ * subclassable (BASETYPE), instances created via FSRef_tp_new/tp_init. */
+static PyTypeObject FSRef_Type = {
+ PyObject_HEAD_INIT(NULL)
+ 0, /*ob_size*/
+ "Carbon.File.FSRef", /*tp_name*/
+ sizeof(FSRefObject), /*tp_basicsize*/
+ 0, /*tp_itemsize*/
+ /* methods */
+ (destructor) FSRef_dealloc, /*tp_dealloc*/
+ 0, /*tp_print*/
+ (getattrfunc)0, /*tp_getattr*/
+ (setattrfunc)0, /*tp_setattr*/
+ (cmpfunc) FSRef_compare, /*tp_compare*/
+ (reprfunc) FSRef_repr, /*tp_repr*/
+ (PyNumberMethods *)0, /* tp_as_number */
+ (PySequenceMethods *)0, /* tp_as_sequence */
+ (PyMappingMethods *)0, /* tp_as_mapping */
+ (hashfunc) FSRef_hash, /*tp_hash*/
+ 0, /*tp_call*/
+ 0, /*tp_str*/
+ PyObject_GenericGetAttr, /*tp_getattro*/
+ PyObject_GenericSetAttr, /*tp_setattro */
+ 0, /*tp_as_buffer*/
+ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
+ 0, /*tp_doc*/
+ 0, /*tp_traverse*/
+ 0, /*tp_clear*/
+ 0, /*tp_richcompare*/
+ 0, /*tp_weaklistoffset*/
+ 0, /*tp_iter*/
+ 0, /*tp_iternext*/
+ FSRef_methods, /* tp_methods */
+ 0, /*tp_members*/
+ FSRef_getsetlist, /*tp_getset*/
+ 0, /*tp_base*/
+ 0, /*tp_dict*/
+ 0, /*tp_descr_get*/
+ 0, /*tp_descr_set*/
+ 0, /*tp_dictoffset*/
+ FSRef_tp_init, /* tp_init */
+ FSRef_tp_alloc, /* tp_alloc */
+ FSRef_tp_new, /* tp_new */
+ FSRef_tp_free, /* tp_free */
+};
+
+/* --------------------- End object type FSRef ---------------------- */
+
+
+/* UnmountVol(volName, vRefNum) -> None.  Unmount the given volume. */
+static PyObject *File_UnmountVol(PyObject *_self, PyObject *_args)
+{
+ Str63 volName;
+ short vRefNum;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "O&h", PyMac_GetStr255, volName, &vRefNum))
+ return NULL;
+ err = UnmountVol(volName, vRefNum);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* FlushVol(volName, vRefNum) -> None.  Flush the given volume to disk. */
+static PyObject *File_FlushVol(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ Str63 volName;
+ short vRefNum;
+ if (!PyArg_ParseTuple(_args, "O&h",
+ PyMac_GetStr255, volName,
+ &vRefNum))
+ return NULL;
+ _err = FlushVol(volName,
+ vRefNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* HSetVol(volName, vRefNum, dirID) -> None.  Set the default volume/dir. */
+static PyObject *File_HSetVol(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ Str63 volName;
+ short vRefNum;
+ long dirID;
+ if (!PyArg_ParseTuple(_args, "O&hl",
+ PyMac_GetStr255, volName,
+ &vRefNum,
+ &dirID))
+ return NULL;
+ _err = HSetVol(volName,
+ vRefNum,
+ dirID);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSClose(refNum) -> None.  Close a file opened via the refNum-based API. */
+static PyObject *File_FSClose(PyObject *_self, PyObject *_args)
+{
+ short refNum;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "h", &refNum))
+ return NULL;
+ err = FSClose(refNum);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* Allocate(refNum) -> long (bytes actually allocated).
+ * NOTE(review): the toolbox Allocate() treats *count as in/out (bytes
+ * requested on input), but `count` is passed uninitialized here, so the
+ * request size is indeterminate.  Looks like a bgen generation quirk --
+ * confirm against the generator before changing the Python signature. */
+static PyObject *File_Allocate(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short refNum;
+ long count;
+ if (!PyArg_ParseTuple(_args, "h",
+ &refNum))
+ return NULL;
+ _err = Allocate(refNum,
+ &count);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("l",
+ count);
+ return _res;
+}
+
+/* GetEOF(refNum) -> long: logical end-of-file of the open file. */
+static PyObject *File_GetEOF(PyObject *_self, PyObject *_args)
+{
+ short refNum;
+ long logEOF;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "h", &refNum))
+ return NULL;
+ err = GetEOF(refNum, &logEOF);
+ if (err != noErr)
+ return PyMac_Error(err);
+ return Py_BuildValue("l", logEOF);
+}
+
+/* SetEOF(refNum, logEOF) -> None.  Set the logical end-of-file. */
+static PyObject *File_SetEOF(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short refNum;
+ long logEOF;
+ if (!PyArg_ParseTuple(_args, "hl",
+ &refNum,
+ &logEOF))
+ return NULL;
+ _err = SetEOF(refNum,
+ logEOF);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* GetFPos(refNum) -> long: current file-mark position. */
+static PyObject *File_GetFPos(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short refNum;
+ long filePos;
+ if (!PyArg_ParseTuple(_args, "h",
+ &refNum))
+ return NULL;
+ _err = GetFPos(refNum,
+ &filePos);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("l",
+ filePos);
+ return _res;
+}
+
+/* SetFPos(refNum, posMode, posOff) -> None.  Set the file mark. */
+static PyObject *File_SetFPos(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short refNum;
+ short posMode;
+ long posOff;
+ if (!PyArg_ParseTuple(_args, "hhl",
+ &refNum,
+ &posMode,
+ &posOff))
+ return NULL;
+ _err = SetFPos(refNum,
+ posMode,
+ posOff);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* GetVRefNum(fileRefNum) -> short: volume reference of an open file. */
+static PyObject *File_GetVRefNum(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short fileRefNum;
+ short vRefNum;
+ if (!PyArg_ParseTuple(_args, "h",
+ &fileRefNum))
+ return NULL;
+ _err = GetVRefNum(fileRefNum,
+ &vRefNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("h",
+ vRefNum);
+ return _res;
+}
+
+/* HGetVol(volName) -> (vRefNum, dirID): default volume and directory.
+ * BUGFIX: volName was declared `StringPtr` (an uninitialized pointer) and
+ * `&volName` was passed to PyMac_GetStr255, which copies a Pascal string
+ * of up to 256 bytes into pointer-sized storage -- stack corruption --
+ * and the garbage pointer was then handed to HGetVol.  Use a real Str255
+ * buffer passed by name, as every other wrapper in this file does. */
+static PyObject *File_HGetVol(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ Str255 volName;
+ short vRefNum;
+ long dirID;
+ if (!PyArg_ParseTuple(_args, "O&",
+ PyMac_GetStr255, volName))
+ return NULL;
+ _err = HGetVol(volName,
+ &vRefNum,
+ &dirID);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("hl",
+ vRefNum,
+ dirID);
+ return _res;
+}
+
+/* HOpen(vRefNum, dirID, fileName, permission) -> refNum (short). */
+static PyObject *File_HOpen(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ SInt8 permission;
+ short refNum;
+ if (!PyArg_ParseTuple(_args, "hlO&b",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName,
+ &permission))
+ return NULL;
+ _err = HOpen(vRefNum,
+ dirID,
+ fileName,
+ permission,
+ &refNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("h",
+ refNum);
+ return _res;
+}
+
+/* HOpenDF(vRefNum, dirID, fileName, permission) -> refNum (data fork). */
+static PyObject *File_HOpenDF(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ SInt8 permission;
+ short refNum;
+ if (!PyArg_ParseTuple(_args, "hlO&b",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName,
+ &permission))
+ return NULL;
+ _err = HOpenDF(vRefNum,
+ dirID,
+ fileName,
+ permission,
+ &refNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("h",
+ refNum);
+ return _res;
+}
+
+/* HOpenRF(vRefNum, dirID, fileName, permission) -> refNum (resource fork). */
+static PyObject *File_HOpenRF(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ SInt8 permission;
+ short refNum;
+ if (!PyArg_ParseTuple(_args, "hlO&b",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName,
+ &permission))
+ return NULL;
+ _err = HOpenRF(vRefNum,
+ dirID,
+ fileName,
+ permission,
+ &refNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("h",
+ refNum);
+ return _res;
+}
+
+/* AllocContig(refNum) -> long (bytes actually allocated).
+ * NOTE(review): as with Allocate() above, *count is an in/out parameter
+ * but `count` is passed uninitialized -- confirm against the bgen
+ * generator before changing the Python signature. */
+static PyObject *File_AllocContig(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short refNum;
+ long count;
+ if (!PyArg_ParseTuple(_args, "h",
+ &refNum))
+ return NULL;
+ _err = AllocContig(refNum,
+ &count);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("l",
+ count);
+ return _res;
+}
+
+/* HCreate(vRefNum, dirID, fileName, creator, fileType) -> None. */
+static PyObject *File_HCreate(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ OSType creator;
+ OSType fileType;
+ if (!PyArg_ParseTuple(_args, "hlO&O&O&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName,
+ PyMac_GetOSType, &creator,
+ PyMac_GetOSType, &fileType))
+ return NULL;
+ _err = HCreate(vRefNum,
+ dirID,
+ fileName,
+ creator,
+ fileType);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* DirCreate(vRefNum, parentDirID, directoryName) -> createdDirID (long). */
+static PyObject *File_DirCreate(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long parentDirID;
+ Str255 directoryName;
+ long createdDirID;
+ if (!PyArg_ParseTuple(_args, "hlO&",
+ &vRefNum,
+ &parentDirID,
+ PyMac_GetStr255, directoryName))
+ return NULL;
+ _err = DirCreate(vRefNum,
+ parentDirID,
+ directoryName,
+ &createdDirID);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("l",
+ createdDirID);
+ return _res;
+}
+
+/* HDelete(vRefNum, dirID, fileName) -> None.  Delete the named file. */
+static PyObject *File_HDelete(PyObject *_self, PyObject *_args)
+{
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "hlO&",
+ &vRefNum, &dirID, PyMac_GetStr255, fileName))
+ return NULL;
+ err = HDelete(vRefNum, dirID, fileName);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* HGetFInfo(vRefNum, dirID, fileName) -> FInfo (Finder info record). */
+static PyObject *File_HGetFInfo(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ FInfo fndrInfo;
+ if (!PyArg_ParseTuple(_args, "hlO&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName))
+ return NULL;
+ _err = HGetFInfo(vRefNum,
+ dirID,
+ fileName,
+ &fndrInfo);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ FInfo_New, &fndrInfo);
+ return _res;
+}
+
+/* HSetFInfo(vRefNum, dirID, fileName, fndrInfo) -> None. */
+static PyObject *File_HSetFInfo(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ FInfo fndrInfo;
+ if (!PyArg_ParseTuple(_args, "hlO&O&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName,
+ FInfo_Convert, &fndrInfo))
+ return NULL;
+ _err = HSetFInfo(vRefNum,
+ dirID,
+ fileName,
+ &fndrInfo);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* HSetFLock(vRefNum, dirID, fileName) -> None.  Lock the named file. */
+static PyObject *File_HSetFLock(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ if (!PyArg_ParseTuple(_args, "hlO&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName))
+ return NULL;
+ _err = HSetFLock(vRefNum,
+ dirID,
+ fileName);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* HRstFLock(vRefNum, dirID, fileName) -> None.  Unlock the named file. */
+static PyObject *File_HRstFLock(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ if (!PyArg_ParseTuple(_args, "hlO&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName))
+ return NULL;
+ _err = HRstFLock(vRefNum,
+ dirID,
+ fileName);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* HRename(vRefNum, dirID, oldName, newName) -> None. */
+static PyObject *File_HRename(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 oldName;
+ Str255 newName;
+ if (!PyArg_ParseTuple(_args, "hlO&O&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, oldName,
+ PyMac_GetStr255, newName))
+ return NULL;
+ _err = HRename(vRefNum,
+ dirID,
+ oldName,
+ newName);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* CatMove(vRefNum, dirID, oldName, newDirID, newName) -> None.
+ * Move a catalog object to another directory on the same volume. */
+static PyObject *File_CatMove(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 oldName;
+ long newDirID;
+ Str255 newName;
+ if (!PyArg_ParseTuple(_args, "hlO&lO&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, oldName,
+ &newDirID,
+ PyMac_GetStr255, newName))
+ return NULL;
+ _err = CatMove(vRefNum,
+ dirID,
+ oldName,
+ newDirID,
+ newName);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSMakeFSSpec(vRefNum, dirID, fileName) -> FSSpec. */
+static PyObject *File_FSMakeFSSpec(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ short vRefNum;
+ long dirID;
+ Str255 fileName;
+ FSSpec spec;
+ if (!PyArg_ParseTuple(_args, "hlO&",
+ &vRefNum,
+ &dirID,
+ PyMac_GetStr255, fileName))
+ return NULL;
+ _err = FSMakeFSSpec(vRefNum,
+ dirID,
+ fileName,
+ &spec);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ FSSpec_New, &spec);
+ return _res;
+}
+
+/* FSGetForkPosition(forkRefNum) -> position (64-bit). */
+static PyObject *File_FSGetForkPosition(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ SInt16 forkRefNum;
+ SInt64 position;
+ if (!PyArg_ParseTuple(_args, "h",
+ &forkRefNum))
+ return NULL;
+ _err = FSGetForkPosition(forkRefNum,
+ &position);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("L",
+ position);
+ return _res;
+}
+
+/* FSSetForkPosition(forkRefNum, positionMode, positionOffset) -> None. */
+static PyObject *File_FSSetForkPosition(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ SInt16 forkRefNum;
+ UInt16 positionMode;
+ SInt64 positionOffset;
+ if (!PyArg_ParseTuple(_args, "hHL",
+ &forkRefNum,
+ &positionMode,
+ &positionOffset))
+ return NULL;
+ _err = FSSetForkPosition(forkRefNum,
+ positionMode,
+ positionOffset);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSGetForkSize(forkRefNum) -> forkSize (64-bit). */
+static PyObject *File_FSGetForkSize(PyObject *_self, PyObject *_args)
+{
+ SInt16 forkRefNum;
+ SInt64 forkSize;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "h", &forkRefNum))
+ return NULL;
+ err = FSGetForkSize(forkRefNum, &forkSize);
+ if (err != noErr)
+ return PyMac_Error(err);
+ return Py_BuildValue("L", forkSize);
+}
+
+/* FSSetForkSize(forkRefNum, positionMode, positionOffset) -> None. */
+static PyObject *File_FSSetForkSize(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ SInt16 forkRefNum;
+ UInt16 positionMode;
+ SInt64 positionOffset;
+ if (!PyArg_ParseTuple(_args, "hHL",
+ &forkRefNum,
+ &positionMode,
+ &positionOffset))
+ return NULL;
+ _err = FSSetForkSize(forkRefNum,
+ positionMode,
+ positionOffset);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSAllocateFork(forkRefNum, flags, positionMode, positionOffset,
+ *                requestCount) -> actualCount (64-bit bytes allocated). */
+static PyObject *File_FSAllocateFork(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ SInt16 forkRefNum;
+ FSAllocationFlags flags;
+ UInt16 positionMode;
+ SInt64 positionOffset;
+ UInt64 requestCount;
+ UInt64 actualCount;
+ if (!PyArg_ParseTuple(_args, "hHHLL",
+ &forkRefNum,
+ &flags,
+ &positionMode,
+ &positionOffset,
+ &requestCount))
+ return NULL;
+ _err = FSAllocateFork(forkRefNum,
+ flags,
+ positionMode,
+ positionOffset,
+ requestCount,
+ &actualCount);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("L",
+ actualCount);
+ return _res;
+}
+
+/* FSFlushFork(forkRefNum) -> None.  Flush buffered fork data to disk. */
+static PyObject *File_FSFlushFork(PyObject *_self, PyObject *_args)
+{
+ SInt16 forkRefNum;
+ OSErr err;
+
+ if (!PyArg_ParseTuple(_args, "h", &forkRefNum))
+ return NULL;
+ err = FSFlushFork(forkRefNum);
+ if (err != noErr)
+ return PyMac_Error(err);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+/* FSCloseFork(forkRefNum) -> None.  Close a fork opened with FSOpenFork. */
+static PyObject *File_FSCloseFork(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ SInt16 forkRefNum;
+ if (!PyArg_ParseTuple(_args, "h",
+ &forkRefNum))
+ return NULL;
+ _err = FSCloseFork(forkRefNum);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+
+/* FSGetDataForkName() -> HFSUniStr255: canonical data-fork name. */
+static PyObject *File_FSGetDataForkName(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ HFSUniStr255 dataForkName;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSGetDataForkName(&dataForkName);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ PyMac_BuildHFSUniStr255, &dataForkName);
+ return _res;
+}
+
+/* FSGetResourceForkName() -> HFSUniStr255: canonical resource-fork name. */
+static PyObject *File_FSGetResourceForkName(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ HFSUniStr255 resourceForkName;
+ if (!PyArg_ParseTuple(_args, ""))
+ return NULL;
+ _err = FSGetResourceForkName(&resourceForkName);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ PyMac_BuildHFSUniStr255, &resourceForkName);
+ return _res;
+}
+
+/* FSPathMakeRef(path) -> (FSRef, isDirectory).  path is a POSIX pathname. */
+static PyObject *File_FSPathMakeRef(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSStatus _err;
+ UInt8 * path;
+ FSRef ref;
+ Boolean isDirectory;
+ if (!PyArg_ParseTuple(_args, "s",
+ &path))
+ return NULL;
+ _err = FSPathMakeRef(path,
+ &ref,
+ &isDirectory);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&b",
+ FSRef_New, &ref,
+ isDirectory);
+ return _res;
+}
+
+#if TARGET_API_MAC_OSX
+
+/* FNNotifyByPath(path, message, flags) -> None (OSX-only API). */
+static PyObject *File_FNNotifyByPath(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSStatus _err;
+ UInt8 * path;
+ FNMessage message;
+ OptionBits flags;
+ if (!PyArg_ParseTuple(_args, "sll",
+ &path,
+ &message,
+ &flags))
+ return NULL;
+ _err = FNNotifyByPath(path,
+ message,
+ flags);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+#endif
+
+#if TARGET_API_MAC_OSX
+
+/* FNNotifyAll(message, flags) -> None (OSX-only API). */
+static PyObject *File_FNNotifyAll(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSStatus _err;
+ FNMessage message;
+ OptionBits flags;
+ if (!PyArg_ParseTuple(_args, "ll",
+ &message,
+ &flags))
+ return NULL;
+ _err = FNNotifyAll(message,
+ flags);
+ if (_err != noErr) return PyMac_Error(_err);
+ Py_INCREF(Py_None);
+ _res = Py_None;
+ return _res;
+}
+#endif
+
+/* NewAlias(fromFile, target) -> Alias.
+ * fromFile may be None (myPyMac_GetOptFSSpecPtr handles the optional arg);
+ * the default points fromFile at the local fromFile__buf__ storage. */
+static PyObject *File_NewAlias(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ FSSpec fromFile__buf__;
+ FSSpec *fromFile = &fromFile__buf__;
+ FSSpec target;
+ AliasHandle alias;
+ if (!PyArg_ParseTuple(_args, "O&O&",
+ myPyMac_GetOptFSSpecPtr, &fromFile,
+ FSSpec_Convert, &target))
+ return NULL;
+ _err = NewAlias(fromFile,
+ &target,
+ &alias);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ Alias_New, alias);
+ return _res;
+}
+
+/* NewAliasMinimalFromFullPath(fullPath, zoneName, serverName) -> Alias. */
+static PyObject *File_NewAliasMinimalFromFullPath(PyObject *_self, PyObject *_args)
+{
+ PyObject *_res = NULL;
+ OSErr _err;
+ char *fullPath__in__;
+ int fullPath__len__;
+ int fullPath__in_len__;
+ Str32 zoneName;
+ Str31 serverName;
+ AliasHandle alias;
+ if (!PyArg_ParseTuple(_args, "s#O&O&",
+ &fullPath__in__, &fullPath__in_len__,
+ PyMac_GetStr255, zoneName,
+ PyMac_GetStr255, serverName))
+ return NULL;
+ fullPath__len__ = fullPath__in_len__;
+ _err = NewAliasMinimalFromFullPath(fullPath__len__, fullPath__in__,
+ zoneName,
+ serverName,
+ &alias);
+ if (_err != noErr) return PyMac_Error(_err);
+ _res = Py_BuildValue("O&",
+ Alias_New, alias);
+ return _res;
+}
+
/* (FSSpec theSpec, Boolean resolveAliasChains)
 *     -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased).
 * Wrapper around ResolveAliasFile. Note that |theSpec| is updated in
 * place by the toolbox call and returned as part of the result tuple. */
static PyObject *File_ResolveAliasFile(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSSpec theSpec;
    Boolean resolveAliasChains;
    Boolean targetIsFolder;
    Boolean wasAliased;
    if (!PyArg_ParseTuple(_args, "O&b",
                          FSSpec_Convert, &theSpec,
                          &resolveAliasChains))
        return NULL;
    _err = ResolveAliasFile(&theSpec,
                            resolveAliasChains,
                            &targetIsFolder,
                            &wasAliased);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("O&bb",
                         FSSpec_New, &theSpec,
                         targetIsFolder,
                         wasAliased);
    return _res;
}
+
/* (FSSpec theSpec, Boolean resolveAliasChains, unsigned long mountFlags)
 *     -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased).
 * Same as ResolveAliasFile but with an extra mount-flags word. */
static PyObject *File_ResolveAliasFileWithMountFlags(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSSpec theSpec;
    Boolean resolveAliasChains;
    Boolean targetIsFolder;
    Boolean wasAliased;
    unsigned long mountFlags;
    if (!PyArg_ParseTuple(_args, "O&bl",
                          FSSpec_Convert, &theSpec,
                          &resolveAliasChains,
                          &mountFlags))
        return NULL;
    _err = ResolveAliasFileWithMountFlags(&theSpec,
                                          resolveAliasChains,
                                          &targetIsFolder,
                                          &wasAliased,
                                          mountFlags);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("O&bb",
                         FSSpec_New, &theSpec,
                         targetIsFolder,
                         wasAliased);
    return _res;
}
+
/* (FSSpec fromFile, FSSpec target, AliasHandle alias) -> (Boolean wasChanged).
 * Wrapper around UpdateAlias; |fromFile| is converted with
 * myPyMac_GetOptFSSpecPtr (presumably optional -- see that helper). */
static PyObject *File_UpdateAlias(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSSpec fromFile__buf__;
    FSSpec *fromFile = &fromFile__buf__;
    FSSpec target;
    AliasHandle alias;
    Boolean wasChanged;
    if (!PyArg_ParseTuple(_args, "O&O&O&",
                          myPyMac_GetOptFSSpecPtr, &fromFile,
                          FSSpec_Convert, &target,
                          Alias_Convert, &alias))
        return NULL;
    _err = UpdateAlias(fromFile,
                       &target,
                       alias,
                       &wasChanged);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("b",
                         wasChanged);
    return _res;
}
+
/* (FSSpec theSpec, Boolean resolveAliasChains, unsigned long mountFlags)
 *     -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased).
 * NoUI variant of ResolveAliasFileWithMountFlags; identical marshalling. */
static PyObject *File_ResolveAliasFileWithMountFlagsNoUI(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSSpec theSpec;
    Boolean resolveAliasChains;
    Boolean targetIsFolder;
    Boolean wasAliased;
    unsigned long mountFlags;
    if (!PyArg_ParseTuple(_args, "O&bl",
                          FSSpec_Convert, &theSpec,
                          &resolveAliasChains,
                          &mountFlags))
        return NULL;
    _err = ResolveAliasFileWithMountFlagsNoUI(&theSpec,
                                              resolveAliasChains,
                                              &targetIsFolder,
                                              &wasAliased,
                                              mountFlags);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("O&bb",
                         FSSpec_New, &theSpec,
                         targetIsFolder,
                         wasAliased);
    return _res;
}
+
/* (FSRef fromFile, FSRef target) -> (AliasHandle inAlias).
 * FSRef-based counterpart of NewAlias; |fromFile| is converted with
 * myPyMac_GetOptFSRefPtr (presumably optional -- see that helper). */
static PyObject *File_FSNewAlias(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSRef fromFile__buf__;
    FSRef *fromFile = &fromFile__buf__;
    FSRef target;
    AliasHandle inAlias;
    if (!PyArg_ParseTuple(_args, "O&O&",
                          myPyMac_GetOptFSRefPtr, &fromFile,
                          FSRef_Convert, &target))
        return NULL;
    _err = FSNewAlias(fromFile,
                      &target,
                      &inAlias);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("O&",
                         Alias_New, inAlias);
    return _res;
}
+
/* (FSRef theRef, Boolean resolveAliasChains, unsigned long mountFlags)
 *     -> (FSRef theRef, Boolean targetIsFolder, Boolean wasAliased).
 * FSRef-based variant; |theRef| is updated in place and returned. */
static PyObject *File_FSResolveAliasFileWithMountFlags(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSRef theRef;
    Boolean resolveAliasChains;
    Boolean targetIsFolder;
    Boolean wasAliased;
    unsigned long mountFlags;
    if (!PyArg_ParseTuple(_args, "O&bl",
                          FSRef_Convert, &theRef,
                          &resolveAliasChains,
                          &mountFlags))
        return NULL;
    _err = FSResolveAliasFileWithMountFlags(&theRef,
                                            resolveAliasChains,
                                            &targetIsFolder,
                                            &wasAliased,
                                            mountFlags);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("O&bb",
                         FSRef_New, &theRef,
                         targetIsFolder,
                         wasAliased);
    return _res;
}
+
/* (FSRef theRef, Boolean resolveAliasChains)
 *     -> (FSRef theRef, Boolean targetIsFolder, Boolean wasAliased).
 * FSRef-based counterpart of ResolveAliasFile. */
static PyObject *File_FSResolveAliasFile(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSRef theRef;
    Boolean resolveAliasChains;
    Boolean targetIsFolder;
    Boolean wasAliased;
    if (!PyArg_ParseTuple(_args, "O&b",
                          FSRef_Convert, &theRef,
                          &resolveAliasChains))
        return NULL;
    _err = FSResolveAliasFile(&theRef,
                              resolveAliasChains,
                              &targetIsFolder,
                              &wasAliased);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("O&bb",
                         FSRef_New, &theRef,
                         targetIsFolder,
                         wasAliased);
    return _res;
}
+
/* (FSRef fromFile, FSRef target, AliasHandle alias) -> (Boolean wasChanged).
 * FSRef-based counterpart of UpdateAlias. */
static PyObject *File_FSUpdateAlias(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;
    OSErr _err;
    FSRef fromFile__buf__;
    FSRef *fromFile = &fromFile__buf__;
    FSRef target;
    AliasHandle alias;
    Boolean wasChanged;
    if (!PyArg_ParseTuple(_args, "O&O&O&",
                          myPyMac_GetOptFSRefPtr, &fromFile,
                          FSRef_Convert, &target,
                          Alias_Convert, &alias))
        return NULL;
    _err = FSUpdateAlias(fromFile,
                         &target,
                         alias,
                         &wasChanged);
    if (_err != noErr) return PyMac_Error(_err);
    _res = Py_BuildValue("b",
                         wasChanged);
    return _res;
}
+
/* pathname(str|unicode|FSSpec|FSRef) -> pathname string.
 * A plain string is returned unchanged (with a new reference), a
 * unicode object is encoded as UTF-8, and anything else is asked for
 * the result of its as_pathname() method. */
static PyObject *File_pathname(PyObject *_self, PyObject *_args)
{
    PyObject *_res = NULL;

    PyObject *obj;

    if (!PyArg_ParseTuple(_args, "O", &obj))
        return NULL;
    if (PyString_Check(obj)) {
        Py_INCREF(obj);
        return obj;
    }
    if (PyUnicode_Check(obj))
        return PyUnicode_AsEncodedString(obj, "utf8", "strict");
    /* Duck-typed fallback: FSSpec/FSRef-like objects provide as_pathname. */
    _res = PyObject_CallMethod(obj, "as_pathname", NULL);
    return _res;

}
+
/* Method table for the _File module.  Each entry is
 * (Python name, C wrapper, METH_VARARGS (1), docstring giving the
 * argument/return signature).  OSX-only entries are bracketed by
 * TARGET_API_MAC_OSX, matching the guards on their implementations. */
static PyMethodDef File_methods[] = {
    {"UnmountVol", (PyCFunction)File_UnmountVol, 1,
     PyDoc_STR("(Str63 volName, short vRefNum) -> None")},
    {"FlushVol", (PyCFunction)File_FlushVol, 1,
     PyDoc_STR("(Str63 volName, short vRefNum) -> None")},
    {"HSetVol", (PyCFunction)File_HSetVol, 1,
     PyDoc_STR("(Str63 volName, short vRefNum, long dirID) -> None")},
    {"FSClose", (PyCFunction)File_FSClose, 1,
     PyDoc_STR("(short refNum) -> None")},
    {"Allocate", (PyCFunction)File_Allocate, 1,
     PyDoc_STR("(short refNum) -> (long count)")},
    {"GetEOF", (PyCFunction)File_GetEOF, 1,
     PyDoc_STR("(short refNum) -> (long logEOF)")},
    {"SetEOF", (PyCFunction)File_SetEOF, 1,
     PyDoc_STR("(short refNum, long logEOF) -> None")},
    {"GetFPos", (PyCFunction)File_GetFPos, 1,
     PyDoc_STR("(short refNum) -> (long filePos)")},
    {"SetFPos", (PyCFunction)File_SetFPos, 1,
     PyDoc_STR("(short refNum, short posMode, long posOff) -> None")},
    {"GetVRefNum", (PyCFunction)File_GetVRefNum, 1,
     PyDoc_STR("(short fileRefNum) -> (short vRefNum)")},
    {"HGetVol", (PyCFunction)File_HGetVol, 1,
     PyDoc_STR("(StringPtr volName) -> (short vRefNum, long dirID)")},
    {"HOpen", (PyCFunction)File_HOpen, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, SInt8 permission) -> (short refNum)")},
    {"HOpenDF", (PyCFunction)File_HOpenDF, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, SInt8 permission) -> (short refNum)")},
    {"HOpenRF", (PyCFunction)File_HOpenRF, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, SInt8 permission) -> (short refNum)")},
    {"AllocContig", (PyCFunction)File_AllocContig, 1,
     PyDoc_STR("(short refNum) -> (long count)")},
    {"HCreate", (PyCFunction)File_HCreate, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, OSType creator, OSType fileType) -> None")},
    {"DirCreate", (PyCFunction)File_DirCreate, 1,
     PyDoc_STR("(short vRefNum, long parentDirID, Str255 directoryName) -> (long createdDirID)")},
    {"HDelete", (PyCFunction)File_HDelete, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> None")},
    {"HGetFInfo", (PyCFunction)File_HGetFInfo, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> (FInfo fndrInfo)")},
    {"HSetFInfo", (PyCFunction)File_HSetFInfo, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName, FInfo fndrInfo) -> None")},
    {"HSetFLock", (PyCFunction)File_HSetFLock, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> None")},
    {"HRstFLock", (PyCFunction)File_HRstFLock, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> None")},
    {"HRename", (PyCFunction)File_HRename, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 oldName, Str255 newName) -> None")},
    {"CatMove", (PyCFunction)File_CatMove, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 oldName, long newDirID, Str255 newName) -> None")},
    {"FSMakeFSSpec", (PyCFunction)File_FSMakeFSSpec, 1,
     PyDoc_STR("(short vRefNum, long dirID, Str255 fileName) -> (FSSpec spec)")},
    {"FSGetForkPosition", (PyCFunction)File_FSGetForkPosition, 1,
     PyDoc_STR("(SInt16 forkRefNum) -> (SInt64 position)")},
    {"FSSetForkPosition", (PyCFunction)File_FSSetForkPosition, 1,
     PyDoc_STR("(SInt16 forkRefNum, UInt16 positionMode, SInt64 positionOffset) -> None")},
    {"FSGetForkSize", (PyCFunction)File_FSGetForkSize, 1,
     PyDoc_STR("(SInt16 forkRefNum) -> (SInt64 forkSize)")},
    {"FSSetForkSize", (PyCFunction)File_FSSetForkSize, 1,
     PyDoc_STR("(SInt16 forkRefNum, UInt16 positionMode, SInt64 positionOffset) -> None")},
    {"FSAllocateFork", (PyCFunction)File_FSAllocateFork, 1,
     PyDoc_STR("(SInt16 forkRefNum, FSAllocationFlags flags, UInt16 positionMode, SInt64 positionOffset, UInt64 requestCount) -> (UInt64 actualCount)")},
    {"FSFlushFork", (PyCFunction)File_FSFlushFork, 1,
     PyDoc_STR("(SInt16 forkRefNum) -> None")},
    {"FSCloseFork", (PyCFunction)File_FSCloseFork, 1,
     PyDoc_STR("(SInt16 forkRefNum) -> None")},
    {"FSGetDataForkName", (PyCFunction)File_FSGetDataForkName, 1,
     PyDoc_STR("() -> (HFSUniStr255 dataForkName)")},
    {"FSGetResourceForkName", (PyCFunction)File_FSGetResourceForkName, 1,
     PyDoc_STR("() -> (HFSUniStr255 resourceForkName)")},
    {"FSPathMakeRef", (PyCFunction)File_FSPathMakeRef, 1,
     PyDoc_STR("(UInt8 * path) -> (FSRef ref, Boolean isDirectory)")},

#if TARGET_API_MAC_OSX
    {"FNNotifyByPath", (PyCFunction)File_FNNotifyByPath, 1,
     PyDoc_STR("(UInt8 * path, FNMessage message, OptionBits flags) -> None")},
#endif

#if TARGET_API_MAC_OSX
    {"FNNotifyAll", (PyCFunction)File_FNNotifyAll, 1,
     PyDoc_STR("(FNMessage message, OptionBits flags) -> None")},
#endif
    {"NewAlias", (PyCFunction)File_NewAlias, 1,
     PyDoc_STR("(FSSpec fromFile, FSSpec target) -> (AliasHandle alias)")},
    {"NewAliasMinimalFromFullPath", (PyCFunction)File_NewAliasMinimalFromFullPath, 1,
     PyDoc_STR("(Buffer fullPath, Str32 zoneName, Str31 serverName) -> (AliasHandle alias)")},
    {"ResolveAliasFile", (PyCFunction)File_ResolveAliasFile, 1,
     PyDoc_STR("(FSSpec theSpec, Boolean resolveAliasChains) -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased)")},
    {"ResolveAliasFileWithMountFlags", (PyCFunction)File_ResolveAliasFileWithMountFlags, 1,
     PyDoc_STR("(FSSpec theSpec, Boolean resolveAliasChains, unsigned long mountFlags) -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased)")},
    {"UpdateAlias", (PyCFunction)File_UpdateAlias, 1,
     PyDoc_STR("(FSSpec fromFile, FSSpec target, AliasHandle alias) -> (Boolean wasChanged)")},
    {"ResolveAliasFileWithMountFlagsNoUI", (PyCFunction)File_ResolveAliasFileWithMountFlagsNoUI, 1,
     PyDoc_STR("(FSSpec theSpec, Boolean resolveAliasChains, unsigned long mountFlags) -> (FSSpec theSpec, Boolean targetIsFolder, Boolean wasAliased)")},
    {"FSNewAlias", (PyCFunction)File_FSNewAlias, 1,
     PyDoc_STR("(FSRef fromFile, FSRef target) -> (AliasHandle inAlias)")},
    {"FSResolveAliasFileWithMountFlags", (PyCFunction)File_FSResolveAliasFileWithMountFlags, 1,
     PyDoc_STR("(FSRef theRef, Boolean resolveAliasChains, unsigned long mountFlags) -> (FSRef theRef, Boolean targetIsFolder, Boolean wasAliased)")},
    {"FSResolveAliasFile", (PyCFunction)File_FSResolveAliasFile, 1,
     PyDoc_STR("(FSRef theRef, Boolean resolveAliasChains) -> (FSRef theRef, Boolean targetIsFolder, Boolean wasAliased)")},
    {"FSUpdateAlias", (PyCFunction)File_FSUpdateAlias, 1,
     PyDoc_STR("(FSRef fromFile, FSRef target, AliasHandle alias) -> (Boolean wasChanged)")},
    {"pathname", (PyCFunction)File_pathname, 1,
     PyDoc_STR("(str|unicode|FSSpec|FSref) -> pathname")},
    {NULL, NULL, 0}
};
+
+
+
/* Convert a Python object to an FSSpec.  Accepts, in order: a real
 * FSSpec object; an (refnum, parid, name) tuple; on OS9 only, a
 * pathname string; and finally anything convertible to an FSRef
 * (full PyMac_GetFSRef coercion on OSX, a real FSRef object on OS9).
 * Returns 1 on success, 0 (with a Python exception set) on failure,
 * so it is usable as a PyArg_ParseTuple "O&" converter. */
int
PyMac_GetFSSpec(PyObject *v, FSSpec *spec)
{
    Str255 path;
    short refnum;
    long parid;
    OSErr err;
    FSRef fsr;

    if (FSSpec_Check(v)) {
        *spec = ((FSSpecObject *)v)->ob_itself;
        return 1;
    }

    if (PyArg_Parse(v, "(hlO&)",
                    &refnum, &parid, PyMac_GetStr255, &path)) {
        err = FSMakeFSSpec(refnum, parid, path, spec);
        /* fnfErr is tolerated -- presumably so a spec can name a
         * not-yet-existing file; confirm against callers. */
        if ( err && err != fnfErr ) {
            PyMac_Error(err);
            return 0;
        }
        return 1;
    }
    PyErr_Clear();
#if !TARGET_API_MAC_OSX
    /* On OS9 we now try a pathname */
    if ( PyString_Check(v) ) {
        /* It's a pathname */
        if( !PyArg_Parse(v, "O&", PyMac_GetStr255, &path) )
            return 0;
        refnum = 0; /* XXXX Should get CurWD here?? */
        parid = 0;
        err = FSMakeFSSpec(refnum, parid, path, spec);
        if ( err && err != fnfErr ) {
            PyMac_Error(err);
            return 0;
        }
        return 1;
    }
    PyErr_Clear();
#endif
    /* Otherwise we try to go via an FSRef. On OSX we go all the way,
    ** on OS9 we accept only a real FSRef object
    */
    /* NOTE: the opening brace of this "if" comes from whichever
     * preprocessor branch is active; both branches open one block
     * closed by the shared brace after the return below. */
#if TARGET_API_MAC_OSX
    if ( PyMac_GetFSRef(v, &fsr) ) {
#else
    if (FSRef_Check(v)) {
        fsr = ((FSRefObject *)v)->ob_itself;
#endif
        err = FSGetCatalogInfo(&fsr, kFSCatInfoNone, NULL, NULL, spec, NULL);
        if (err != noErr) {
            PyMac_Error(err);
            return 0;
        }
        return 1;
    }
    /* On OSX, PyMac_GetFSRef has already set an exception when it
     * returns 0, so only OS9 needs an explicit error here. */
#if !TARGET_API_MAC_OSX
    PyErr_SetString(PyExc_TypeError, "FSSpec, FSRef, pathname or (refnum, parid, path) required");
#endif
    return 0;
}
+
+int
+PyMac_GetFSRef(PyObject *v, FSRef *fsr)
+{
+ OSStatus err;
+ FSSpec fss;
+
+ if (FSRef_Check(v)) {
+ *fsr = ((FSRefObject *)v)->ob_itself;
+ return 1;
+ }
+
+#if TARGET_API_MAC_OSX
+ /* On OSX we now try a pathname */
+ if ( PyString_Check(v) || PyUnicode_Check(v)) {
+ char *path = NULL;
+ if (!PyArg_Parse(v, "et", Py_FileSystemDefaultEncoding, &path))
+ return NULL;
+ if ( (err=FSPathMakeRef(path, fsr, NULL)) ) {
+ PyMac_Error(err);
+ return 0;
+ }
+ return 1;
+ }
+ /* XXXX Should try unicode here too */
+#endif
+ /* Otherwise we try to go via an FSSpec */
+#if TARGET_API_MAC_OSX
+ if (FSSpec_Check(v)) {
+ fss = ((FSSpecObject *)v)->ob_itself;
+#else
+ if (PyMac_GetFSSpec(v, &fss)) {
+#endif
+ if ((err=FSpMakeFSRef(&fss, fsr)) == 0)
+ return 1;
+ PyMac_Error(err);
+ return 0;
+ }
+ PyErr_SetString(PyExc_TypeError, "FSRef, FSSpec or pathname required");
+ return 0;
+}
+
/* Exported helper: wrap an FSSpec in a Python FSSpec object.
 * Registered via PyMac_INIT_TOOLBOX_OBJECT_NEW in init_File. */
extern PyObject *
PyMac_BuildFSSpec(FSSpec *spec)
{
    return FSSpec_New(spec);
}
+
/* Exported helper: wrap an FSRef in a Python FSRef object.
 * Registered via PyMac_INIT_TOOLBOX_OBJECT_NEW in init_File. */
extern PyObject *
PyMac_BuildFSRef(FSRef *spec)
{
    return FSRef_New(spec);
}
+
+
/* Module initialization for _File: register the FSSpec/FSRef
 * constructors and converters with the shared toolbox-object glue,
 * create the module, install the MacOS error object, and publish each
 * extension type under both its plain name and a "...Type" alias kept
 * for backward compatibility. */
void init_File(void)
{
    PyObject *m;
    PyObject *d;



    /* Make these conversions available to other toolbox modules. */
    PyMac_INIT_TOOLBOX_OBJECT_NEW(FSSpec *, PyMac_BuildFSSpec);
    PyMac_INIT_TOOLBOX_OBJECT_NEW(FSRef *, PyMac_BuildFSRef);
    PyMac_INIT_TOOLBOX_OBJECT_CONVERT(FSSpec, PyMac_GetFSSpec);
    PyMac_INIT_TOOLBOX_OBJECT_CONVERT(FSRef, PyMac_GetFSRef);


    m = Py_InitModule("_File", File_methods);
    d = PyModule_GetDict(m);
    File_Error = PyMac_GetOSErrException();
    if (File_Error == NULL ||
        PyDict_SetItemString(d, "Error", File_Error) != 0)
        return;
    FSCatalogInfo_Type.ob_type = &PyType_Type;
    if (PyType_Ready(&FSCatalogInfo_Type) < 0) return;
    Py_INCREF(&FSCatalogInfo_Type);
    PyModule_AddObject(m, "FSCatalogInfo", (PyObject *)&FSCatalogInfo_Type);
    /* Backward-compatible name */
    Py_INCREF(&FSCatalogInfo_Type);
    PyModule_AddObject(m, "FSCatalogInfoType", (PyObject *)&FSCatalogInfo_Type);
    FInfo_Type.ob_type = &PyType_Type;
    if (PyType_Ready(&FInfo_Type) < 0) return;
    Py_INCREF(&FInfo_Type);
    PyModule_AddObject(m, "FInfo", (PyObject *)&FInfo_Type);
    /* Backward-compatible name */
    Py_INCREF(&FInfo_Type);
    PyModule_AddObject(m, "FInfoType", (PyObject *)&FInfo_Type);
    Alias_Type.ob_type = &PyType_Type;
    if (PyType_Ready(&Alias_Type) < 0) return;
    Py_INCREF(&Alias_Type);
    PyModule_AddObject(m, "Alias", (PyObject *)&Alias_Type);
    /* Backward-compatible name */
    Py_INCREF(&Alias_Type);
    PyModule_AddObject(m, "AliasType", (PyObject *)&Alias_Type);
    FSSpec_Type.ob_type = &PyType_Type;
    if (PyType_Ready(&FSSpec_Type) < 0) return;
    Py_INCREF(&FSSpec_Type);
    PyModule_AddObject(m, "FSSpec", (PyObject *)&FSSpec_Type);
    /* Backward-compatible name */
    Py_INCREF(&FSSpec_Type);
    PyModule_AddObject(m, "FSSpecType", (PyObject *)&FSSpec_Type);
    FSRef_Type.ob_type = &PyType_Type;
    if (PyType_Ready(&FSRef_Type) < 0) return;
    Py_INCREF(&FSRef_Type);
    PyModule_AddObject(m, "FSRef", (PyObject *)&FSRef_Type);
    /* Backward-compatible name */
    Py_INCREF(&FSRef_Type);
    PyModule_AddObject(m, "FSRefType", (PyObject *)&FSRef_Type);
}
+
+/* ======================== End module _File ======================== */
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+# Actions for use in token specifications
+#
+#=======================================================================
+
class Action:
    """Base class for Plex lexer actions."""

    def same_as(self, other):
        # Default equivalence is plain object identity; subclasses
        # override this with value-based comparisons.
        return other is self
+
+
class Return(Action):
    """
    Internal Plex action which causes |value| to
    be returned as the value of the associated token.
    """

    value = None  # the fixed token value

    def __init__(self, value):
        self.value = value

    def perform(self, token_stream, text):
        # The matched text is ignored; the stored value wins.
        return self.value

    def same_as(self, other):
        if not isinstance(other, Return):
            return False
        return other.value == self.value

    def __repr__(self):
        return "Return(%s)" % repr(self.value)
+
+
class Call(Action):
    """
    Internal Plex action which causes a function to be called.
    """

    function = None  # callable invoked as function(token_stream, text)

    def __init__(self, function):
        self.function = function

    def perform(self, token_stream, text):
        # Whatever the function returns becomes the token value.
        return self.function(token_stream, text)

    def same_as(self, other):
        # Compare the wrapped callables by identity.
        return isinstance(other, Call) and other.function is self.function

    def __repr__(self):
        return "Call(%s)" % self.function.__name__
+
+
class Begin(Action):
    """
    Begin(state_name) is a Plex action which causes the Scanner to
    enter the state |state_name|. See the docstring of Plex.Lexicon
    for more information.
    """

    state_name = None  # target scanner state

    def __init__(self, state_name):
        self.state_name = state_name

    def perform(self, token_stream, text):
        # Side effect only: switch the scanner's state; no token value.
        token_stream.begin(self.state_name)

    def same_as(self, other):
        return isinstance(other, Begin) and other.state_name == self.state_name

    def __repr__(self):
        return "Begin(%s)" % self.state_name
+
+
class Ignore(Action):
    """
    IGNORE is a Plex action which causes its associated token
    to be ignored. See the docstring of Plex.Lexicon for more
    information.
    """

    def perform(self, token_stream, text):
        # Returning None tells the scanner to keep looking for a token.
        return None

    def __repr__(self):
        return "IGNORE"

# Shared singleton used in lexicon specifications.
IGNORE = Ignore()
IGNORE.__doc__ = Ignore.__doc__
+
class Text(Action):
    """
    TEXT is a Plex action which causes the text of a token to
    be returned as the value of the token. See the docstring of
    Plex.Lexicon for more information.
    """

    def perform(self, token_stream, text):
        # The matched text itself is the token value.
        return text

    def __repr__(self):
        return "TEXT"

# Shared singleton used in lexicon specifications.
TEXT = Text()
TEXT.__doc__ = Text.__doc__
+
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+# Converting NFA to DFA
+#
+#=======================================================================
+
+import Machines
+from Machines import LOWEST_PRIORITY
+from Transitions import TransitionMap
+
def nfa_to_dfa(old_machine, debug = None):
    """
    Given a nondeterministic Machine, return a new equivalent
    Machine which is deterministic.

    |old_machine| is a Machines.Machine (the NFA); the result is a
    Machines.FastMachine.  If |debug| is a writable file-like object,
    the old-to-new state mapping is dumped to it afterwards.
    """
    # We build a new machine whose states correspond to sets of states
    # in the old machine. Initially we add a new state corresponding to
    # the epsilon-closure of each initial old state. Then we give transitions
    # to each new state which are the union of all transitions out of any
    # of the corresponding old states. The new state reached on a given
    # character is the one corresponding to the set of states reachable
    # on that character from any of the old states. As new combinations of
    # old states are created, new states are added as needed until closure
    # is reached.
    new_machine = Machines.FastMachine()
    state_map = StateMap(new_machine)
    # Seed the process using the initial states of the old machine.
    # Make the corresponding new states into initial states of the new
    # machine with the same names.
    for (key, old_state) in old_machine.initial_states.items():
        new_state = state_map.old_to_new(epsilon_closure(old_state))
        new_machine.make_initial_state(key, new_state)
    # Tricky bit here: we add things to the end of this list while we're
    # iterating over it. The iteration stops when closure is achieved.
    for new_state in new_machine.states:
        transitions = TransitionMap()
        for old_state in state_map.new_to_old(new_state).keys():
            for event, old_target_states in old_state.transitions.items():
                # Skip epsilon events ('' is falsy) and empty target sets;
                # epsilon moves were already folded in by the closures.
                if event and old_target_states:
                    transitions.add_set(event, set_epsilon_closure(old_target_states))
        for event, old_states in transitions.items():
            new_machine.add_transitions(new_state, event, state_map.old_to_new(old_states))
    if debug:
        debug.write("\n===== State Mapping =====\n")
        state_map.dump(debug)
    return new_machine
+
def set_epsilon_closure(state_set):
    """
    Given a set of states (a dict used as a set), return the union of
    the epsilon closures of its member states, as a dict mapping each
    reachable state to 1.
    """
    closure = {}
    for member in state_set.keys():
        # epsilon_closure() returns a {state: 1} dict; merging it
        # accumulates the union.
        closure.update(epsilon_closure(member))
    return closure
+
def epsilon_closure(state):
    """
    Return the set of states reachable from |state| by epsilon moves,
    as a dict mapping state -> 1.  The result is cached on the state's
    epsilon_closure attribute.
    """
    cached = state.epsilon_closure
    if cached is not None:
        return cached
    cached = {}
    # Install the dict on the state *before* recursing so that cycles
    # of epsilon moves terminate.
    state.epsilon_closure = cached
    add_to_epsilon_closure(cached, state)
    return cached
+
def add_to_epsilon_closure(state_set, state):
    """
    Recursively add to |state_set| (a dict used as a set) every state
    reachable from |state| by epsilon moves.
    """
    if state_set.get(state, 0):
        return  # already visited
    state_set[state] = 1
    epsilon_targets = state.transitions.get_epsilon()
    if not epsilon_targets:
        return
    for target in epsilon_targets.keys():
        add_to_epsilon_closure(state_set, target)
+
class StateMap:
    """
    Helper used by nfa_to_dfa() to map back and forth between sets of
    states of the old (nondeterministic) machine and single states of
    the new (deterministic) machine.
    """

    new_machine = None       # the machine being built
    old_to_new_dict = None   # {(old_state, ...): new_state}
    new_to_old_dict = None   # {id(new_state): old_state_set}

    def __init__(self, new_machine):
        self.new_machine = new_machine
        self.old_to_new_dict = {}
        self.new_to_old_dict = {}

    def old_to_new(self, old_state_set):
        """
        Return the state of the new machine corresponding to the
        set of old machine states represented by |old_state_set|.
        A new state is created on first sight of a set; it carries the
        highest-priority action found among the old states.
        """
        key = self.make_key(old_state_set)
        new_state = self.old_to_new_dict.get(key, None)
        if not new_state:
            action = self.highest_priority_action(old_state_set)
            new_state = self.new_machine.new_state(action)
            self.old_to_new_dict[key] = new_state
            self.new_to_old_dict[id(new_state)] = old_state_set
        return new_state

    def highest_priority_action(self, state_set):
        """Return the action with the highest priority in |state_set|."""
        best_action = None
        best_priority = LOWEST_PRIORITY
        for old_state in state_set.keys():
            # Strict > keeps the first state seen on priority ties.
            if old_state.action_priority > best_priority:
                best_priority = old_state.action_priority
                best_action = old_state.action
        return best_action

    def new_to_old(self, new_state):
        """Given a new state, return the corresponding set of old states."""
        return self.new_to_old_dict[id(new_state)]

    def make_key(self, state_set):
        """
        Reduce a set of states to a uniquified sorted tuple usable as
        a dictionary key.
        """
        members = state_set.keys()
        members.sort()
        return tuple(members)

    def dump(self, file):
        """Write a "new state <-- old states" listing to |file|."""
        from Transitions import state_set_str
        for new_state in self.new_machine.states:
            old_state_set = self.new_to_old_dict[id(new_state)]
            file.write(" State %s <-- %s\n" % (
                new_state['number'], state_set_str(old_state_set)))
+
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+# Exception classes
+#
+#=======================================================================
+
+import exceptions
+
class PlexError(exceptions.Exception):
    """Base class for all exceptions raised by Plex."""
    message = ""
+
class PlexTypeError(PlexError, TypeError):
    """A Plex error that is also catchable as a TypeError."""
+
class PlexValueError(PlexError, ValueError):
    """A Plex error that is also catchable as a ValueError."""
+
class InvalidRegex(PlexError):
    """A regular expression given to Plex was malformed."""
+
class InvalidToken(PlexError):
    """A token specification in a Lexicon was malformed."""

    def __init__(self, token_number, message):
        # Prefix the ordinal of the offending token onto the message.
        PlexError.__init__(self, "Token number %d: %s" % (token_number, message))
+
class InvalidScanner(PlexError):
    """The scanner specification as a whole was malformed."""
+
class AmbiguousAction(PlexError):
    """Raised when two tokens with different actions match the same string."""

    message = "Two tokens with different actions can match the same string"

    def __init__(self):
        # Takes no arguments: the class-level message says it all.
        pass
+
class UnrecognizedInput(PlexError):
    """The scanner could not match any token at the current position."""

    scanner = None     # the Scanner that failed
    position = None    # scanner.position() result; formatted below as (str, int, int)
    state_name = None  # scanner state in effect at the time

    def __init__(self, scanner, state_name):
        self.scanner = scanner
        self.position = scanner.position()
        self.state_name = state_name

    def __str__(self):
        return ("'%s', line %d, char %d: Token not recognised in state %s"
                % (self.position + (repr(self.state_name),)))
+
+
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+# Lexical Analyser Specification
+#
+#=======================================================================
+
+import types
+
+import Actions
+import DFA
+import Errors
+import Machines
+import Regexps
+
+# debug_flags for Lexicon constructor
+DUMP_NFA = 1
+DUMP_DFA = 2
+
class State:
    """
    Used as part of a Plex.Lexicon specification to introduce a
    user-defined scanner state.

    Constructor:

       State(name, token_specifications)
    """

    name = None    # state name (string)
    tokens = None  # list of token definitions

    def __init__(self, name, tokens):
        self.name = name
        self.tokens = tokens
+
class Lexicon:
    """
    Lexicon(specification) builds a lexical analyser from the given
    |specification|. The specification consists of a list of
    specification items. Each specification item may be either:

       1) A token definition, which is a tuple:

             (pattern, action)

          The |pattern| is a regular expression built using the
          constructors defined in the Plex module.

          The |action| is the action to be performed when this pattern
          is recognised (see below).

       2) A state definition:

             State(name, tokens)

          where |name| is a character string naming the state,
          and |tokens| is a list of token definitions as
          above. The meaning and usage of states is described
          below.

    Actions
    -------

    The |action| in a token specification may be one of three things:

       1) A function, which is called as follows:

             function(scanner, text)

          where |scanner| is the relevant Scanner instance, and |text|
          is the matched text. If the function returns anything
          other than None, that value is returned as the value of the
          token. If it returns None, scanning continues as if the IGNORE
          action were specified (see below).

       2) One of the following special actions:

          IGNORE means that the recognised characters will be treated as
                 white space and ignored. Scanning will continue until
                 the next non-ignored token is recognised before returning.

          TEXT   causes the scanned text itself to be returned as the
                 value of the token.

       3) Any other value, which is returned as the value of the token.

    States
    ------

    At any given time, the scanner is in one of a number of states.
    Associated with each state is a set of possible tokens. When scanning,
    only tokens associated with the current state are recognised.

    There is a default state, whose name is the empty string. Token
    definitions which are not inside any State definition belong to
    the default state.

    The initial state of the scanner is the default state. The state can
    be changed in one of two ways:

       1) Using Begin(state_name) as the action of a token.

       2) Calling the begin(state_name) method of the Scanner.

    To change back to the default state, use '' as the state name.
    """

    machine = None # Machine
    tables = None # StateTableMachine

    def __init__(self, specifications, debug = None, debug_flags = 7, timings = None):
        # Build an NFA from the specification, then convert it to a DFA.
        # |debug_flags| bit 1 dumps the NFA, bit 2 the DFA; |timings| is
        # an optional file-like object receiving build-time statistics.
        if type(specifications) <> types.ListType:
            raise Errors.InvalidScanner("Scanner definition is not a list")
        if timings:
            from Timing import time
            total_time = 0.0
            time1 = time()
        nfa = Machines.Machine()
        default_initial_state = nfa.new_initial_state('')
        token_number = 1
        for spec in specifications:
            if isinstance(spec, State):
                # A State's tokens hang off that state's own initial state.
                user_initial_state = nfa.new_initial_state(spec.name)
                for token in spec.tokens:
                    self.add_token_to_machine(
                        nfa, user_initial_state, token, token_number)
                    token_number = token_number + 1
            elif type(spec) == types.TupleType:
                # Bare token tuples belong to the default ('') state.
                self.add_token_to_machine(
                    nfa, default_initial_state, spec, token_number)
                token_number = token_number + 1
            else:
                raise Errors.InvalidToken(
                    token_number,
                    "Expected a token definition (tuple) or State instance")
        if timings:
            time2 = time()
            total_time = total_time + (time2 - time1)
            time3 = time()
        if debug and (debug_flags & 1):
            debug.write("\n============= NFA ===========\n")
            nfa.dump(debug)
        # Pass |debug| through to the DFA construction only when both
        # dump flags are set.
        dfa = DFA.nfa_to_dfa(nfa, debug = (debug_flags & 3) == 3 and debug)
        if timings:
            time4 = time()
            total_time = total_time + (time4 - time3)
        if debug and (debug_flags & 2):
            debug.write("\n============= DFA ===========\n")
            dfa.dump(debug)
        if timings:
            timings.write("Constructing NFA : %5.2f\n" % (time2 - time1))
            timings.write("Converting to DFA: %5.2f\n" % (time4 - time3))
            timings.write("TOTAL : %5.2f\n" % total_time)
        self.machine = dfa

    def add_token_to_machine(self, machine, initial_state, token_spec, token_number):
        # Wire one token's pattern into the NFA between |initial_state|
        # and a fresh accepting state carrying the token's action.
        try:
            (re, action_spec) = self.parse_token_definition(token_spec)
            # Disabled this -- matching empty strings can be useful
            #if re.nullable:
            # raise Errors.InvalidToken(
            # token_number, "Pattern can match 0 input symbols")
            if isinstance(action_spec, Actions.Action):
                action = action_spec
            elif callable(action_spec):
                action = Actions.Call(action_spec)
            else:
                action = Actions.Return(action_spec)
            final_state = machine.new_state()
            re.build_machine(machine, initial_state, final_state,
                             match_bol = 1, nocase = 0)
            # Earlier tokens win ties: priority decreases with token number.
            final_state.set_action(action, priority = -token_number)
        except Errors.PlexError, e:
            # Re-raise the same class with the token number prepended.
            raise e.__class__("Token number %d: %s" % (token_number, e))

    def parse_token_definition(self, token_spec):
        # Validate and unpack a (pattern, action) tuple.
        if type(token_spec) <> types.TupleType:
            raise Errors.InvalidToken("Token definition is not a tuple")
        if len(token_spec) <> 2:
            raise Errors.InvalidToken("Wrong number of items in token definition")
        pattern, action = token_spec
        if not isinstance(pattern, Regexps.RE):
            raise Errors.InvalidToken("Pattern is not an RE instance")
        return (pattern, action)

    def get_initial_state(self, name):
        # Look up the DFA's initial state for scanner state |name|.
        return self.machine.get_initial_state(name)
+
+
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+# Classes for building NFAs and DFAs
+#
+#=======================================================================
+
+import string
+import sys
+from sys import maxint
+from types import TupleType
+
+from Transitions import TransitionMap
+
+LOWEST_PRIORITY = -sys.maxint
+
class Machine:
    """A collection of Nodes representing an NFA or DFA."""

    states = None            # list of all Nodes, in creation order
    next_state_number = 1    # number assigned to the next new state
    initial_states = None    # {name: Node}

    def __init__(self):
        self.states = []
        self.initial_states = {}

    def __del__(self):
        #print "Destroying", self ###
        # Ask each state to drop its references so the graph's cycles
        # can be reclaimed.
        for state in self.states:
            state.destroy()

    def new_state(self):
        """Add a new state to the machine and return it."""
        node = Node()
        node.number = self.next_state_number
        self.next_state_number = node.number + 1
        self.states.append(node)
        return node

    def new_initial_state(self, name):
        """Create a new state and register it as initial state |name|."""
        state = self.new_state()
        self.make_initial_state(name, state)
        return state

    def make_initial_state(self, name, state):
        """Record |state| as the initial state for |name|."""
        self.initial_states[name] = state

    def get_initial_state(self, name):
        """Return the initial state registered under |name|."""
        return self.initial_states[name]

    def dump(self, file):
        """Write a debug listing of the whole machine to |file|."""
        file.write("Plex.Machine:\n")
        if self.initial_states is not None:
            file.write(" Initial states:\n")
            for (name, state) in self.initial_states.items():
                file.write(" '%s': %d\n" % (name, state.number))
        for s in self.states:
            s.dump(file)
+
class Node:
    """A state of an NFA or DFA."""

    transitions = None      # TransitionMap
    action = None           # Action
    action_priority = None  # integer
    number = 0              # for debug output
    epsilon_closure = None  # used by nfa_to_dfa()

    def __init__(self):
        # Preinitialise the transition map because the nfa-to-dfa
        # algorithm relies on the empty-transition entry existing.
        self.transitions = TransitionMap()
        self.action_priority = LOWEST_PRIORITY

    def destroy(self):
        """Drop outgoing references to help break reference cycles."""
        self.transitions = None
        self.action = None
        self.epsilon_closure = None

    def add_transition(self, event, new_state):
        self.transitions.add(event, new_state)

    def link_to(self, state):
        """Add an epsilon-move from this state to another state."""
        self.add_transition('', state)

    def set_action(self, action, priority):
        """Make this an accepting state with the given action. If
        there is already an action, keep whichever has the higher
        priority."""
        if priority <= self.action_priority:
            return
        self.action = action
        self.action_priority = priority

    def get_action(self):
        return self.action

    def get_action_priority(self):
        return self.action_priority

    def is_accepting(self):
        return self.action is not None

    def __str__(self):
        return "State %d" % self.number

    def dump(self, file):
        import string
        # Header
        file.write(" State %d:\n" % self.number)
        # Transitions
        self.transitions.dump(file)
        # Action (with the priority that won)
        if self.action is not None:
            file.write(" %s [priority %d]\n" % (self.action, self.action_priority))
+
+
class FastMachine:
    """
    FastMachine is a deterministic machine represented in a way that
    allows fast scanning: each state is a plain dict mapping an input
    event (a single character, or 'bol'/'eol'/'eof'/'else') to the
    next state dict.
    """
    initial_states = None # {state_name:state}
    states = None # [state]
    # where state = {event:state, 'else':state, 'action':Action}
    next_number = 1 # for debugging

    # Template copied for each new state so all special keys exist.
    new_state_template = {
        '':None, 'bol':None, 'eol':None, 'eof':None, 'else':None
    }

    def __init__(self, old_machine = None):
        """
        If |old_machine| (a DFA built as a Machines.Machine) is given,
        convert it into the fast dict-based representation.
        """
        self.initial_states = initial_states = {}
        self.states = []
        if old_machine:
            # First pass: allocate a dict-state for every old state.
            self.old_to_new = old_to_new = {}
            for old_state in old_machine.states:
                new_state = self.new_state()
                old_to_new[old_state] = new_state
            for name, old_state in old_machine.initial_states.items():
                initial_states[name] = old_to_new[old_state]
            # Second pass: copy transitions and actions.
            # NOTE: assumes the old machine is deterministic -- each
            # transition target set has at most one member (keys()[0]).
            for old_state in old_machine.states:
                new_state = old_to_new[old_state]
                for event, old_state_set in old_state.transitions.items():
                    if old_state_set:
                        new_state[event] = old_to_new[old_state_set.keys()[0]]
                    else:
                        new_state[event] = None
                new_state['action'] = old_state.action

    def __del__(self):
        # Clear the state dicts to break their mutual references.
        for state in self.states:
            state.clear()

    def new_state(self, action = None):
        """Allocate, register and return a new state dict."""
        number = self.next_number
        self.next_number = number + 1
        result = self.new_state_template.copy()
        result['number'] = number
        result['action'] = action
        self.states.append(result)
        return result

    def make_initial_state(self, name, state):
        self.initial_states[name] = state

    def add_transitions(self, state, event, new_state):
        """
        Add transitions from |state| for |event|, which is either a
        (code0, code1) half-open code range or a special symbol string.
        """
        if type(event) == TupleType:
            code0, code1 = event
            if code0 == -maxint:
                # Open-ended lower bound means "everything else".
                state['else'] = new_state
            elif code1 <> maxint:
                # Expand a finite code range into one entry per character.
                # NOTE(review): a range with code1 == maxint (and finite
                # code0) is silently dropped here -- presumably such
                # ranges never reach this point; confirm against callers.
                while code0 < code1:
                    state[chr(code0)] = new_state
                    code0 = code0 + 1
        else:
            state[event] = new_state

    def get_initial_state(self, name):
        return self.initial_states[name]

    def dump(self, file):
        """Write a human-readable description of the machine to |file|."""
        file.write("Plex.FastMachine:\n")
        file.write(" Initial states:\n")
        for name, state in self.initial_states.items():
            file.write(" %s: %s\n" % (repr(name), state['number']))
        for state in self.states:
            self.dump_state(state, file)

    def dump_state(self, state, file):
        import string
        # Header
        file.write(" State %d:\n" % state['number'])
        # Transitions
        self.dump_transitions(state, file)
        # Action
        action = state['action']
        if action is not None:
            file.write(" %s\n" % action)

    def dump_transitions(self, state, file):
        """Group and print |state|'s outgoing transitions by target."""
        chars_leading_to_state = {}
        special_to_state = {}
        for (c, s) in state.items():
            if len(c) == 1:
                chars = chars_leading_to_state.get(id(s), None)
                if chars is None:
                    chars = []
                    chars_leading_to_state[id(s)] = chars
                chars.append(c)
            elif len(c) <= 4:
                # Special keys ('bol', 'eol', 'eof', 'else'); longer keys
                # such as 'action' and 'number' are bookkeeping, not events.
                special_to_state[c] = s
        ranges_to_state = {}
        # NOTE: this loop deliberately rebinds the |state| parameter; it
        # is no longer needed after the grouping pass above.
        for state in self.states:
            char_list = chars_leading_to_state.get(id(state), None)
            if char_list:
                ranges = self.chars_to_ranges(char_list)
                ranges_to_state[ranges] = state
        ranges_list = ranges_to_state.keys()
        ranges_list.sort()
        for ranges in ranges_list:
            key = self.ranges_to_string(ranges)
            state = ranges_to_state[ranges]
            file.write(" %s --> State %d\n" % (key, state['number']))
        for key in ('bol', 'eol', 'eof', 'else'):
            state = special_to_state.get(key, None)
            if state:
                file.write(" %s --> State %d\n" % (key, state['number']))

    def chars_to_ranges(self, char_list):
        """Collapse a list of characters into inclusive (c1, c2) runs."""
        char_list.sort()
        i = 0
        n = len(char_list)
        result = []
        while i < n:
            c1 = ord(char_list[i])
            c2 = c1
            i = i + 1
            while i < n and ord(char_list[i]) == c2 + 1:
                i = i + 1
                c2 = c2 + 1
            result.append((chr(c1), chr(c2)))
        return tuple(result)

    def ranges_to_string(self, range_list):
        return string.join(map(self.range_to_string, range_list), ",")

    def range_to_string(self, (c1, c2)):
        # Single character prints as repr; a run prints as 'a'..'z'.
        if c1 == c2:
            return repr(c1)
        else:
            return "%s..%s" % (repr(c1), repr(c2))
+##
+## (Superseded by Machines.FastMachine)
+##
+## class StateTableMachine:
+## """
+## StateTableMachine is an alternative representation of a Machine
+## that can be run more efficiently.
+## """
+## initial_states = None # {state_name:state_index}
+## states = None # [([state] indexed by char code, Action)]
+
+## special_map = {'bol':256, 'eol':257, 'eof':258}
+
+## def __init__(self, m):
+## """
+## Initialise StateTableMachine from Machine |m|.
+## """
+## initial_states = self.initial_states = {}
+## states = self.states = [None]
+## old_to_new = {}
+## i = 1
+## for old_state in m.states:
+## new_state = ([0] * 259, old_state.get_action())
+## states.append(new_state)
+## old_to_new[old_state] = i # new_state
+## i = i + 1
+## for name, old_state in m.initial_states.items():
+## initial_states[name] = old_to_new[old_state]
+## for old_state in m.states:
+## new_state_index = old_to_new[old_state]
+## new_table = states[new_state_index][0]
+## transitions = old_state.transitions
+## for c, old_targets in transitions.items():
+## if old_targets:
+## old_target = old_targets[0]
+## new_target_index = old_to_new[old_target]
+## if len(c) == 1:
+## a = ord(c)
+## else:
+## a = self.special_map[c]
+## new_table[a] = states[new_target_index]
+
+## def dump(self, f):
+## f.write("Plex.StateTableMachine:\n")
+## f.write(" Initial states:\n")
+## for name, index in self.initial_states.items():
+## f.write(" %s: State %d\n" % (
+## repr(name), id(self.states[index])))
+## for i in xrange(1, len(self.states)):
+## table, action = self.states[i]
+## f.write(" State %d:" % i)
+## if action:
+## f.write("%s" % action)
+## f.write("\n")
+## f.write(" %s\n" % map(id,table))
+
+
+
+
+
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+# Regular Expressions
+#
+#=======================================================================
+
+import array
+import string
+import types
+from sys import maxint
+
+import Errors
+
+#
+# Constants
+#
+
BOL = 'bol'  # special input symbol: beginning of line
EOL = 'eol'  # special input symbol: end of line
EOF = 'eof'  # special input symbol: end of file

# Character code of newline, used to special-case '\n' in code ranges.
nl_code = ord('\n')
+
+#
+# Helper functions
+#
+
def chars_to_ranges(s):
    """
    Return a flat list of character codes [code1a, code1b, code2a,
    code2b, ...] describing half-open ranges which together cover all
    the characters in |s|.
    """
    codes = sorted(map(ord, s))
    result = []
    n = len(codes)
    i = 0
    while i < n:
        lo = codes[i]
        hi = lo + 1
        i = i + 1
        # Absorb duplicates and consecutive codes into the current range.
        while i < n and codes[i] <= hi:
            hi = hi + 1
            i = i + 1
        result.append(lo)
        result.append(hi)
    return result
+
def uppercase_range(code1, code2):
    """
    If the half-open code range [code1, code2) includes any lower case
    letters, return the corresponding upper case range as a pair,
    otherwise None.
    """
    lo = max(code1, ord('a'))
    hi = min(code2, ord('z') + 1)
    if lo >= hi:
        return None
    shift = ord('A') - ord('a')
    return (lo + shift, hi + shift)
+
def lowercase_range(code1, code2):
    """
    If the half-open code range [code1, code2) includes any upper case
    letters, return the corresponding lower case range as a pair,
    otherwise None.
    """
    lo = max(code1, ord('A'))
    hi = min(code2, ord('Z') + 1)
    if lo >= hi:
        return None
    shift = ord('a') - ord('A')
    return (lo + shift, hi + shift)
+
def CodeRanges(code_list):
    """
    Given a flat list of range endpoints as returned by chars_to_ranges,
    return an RE which matches a character in any of the ranges.
    """
    pairs = zip(code_list[::2], code_list[1::2])
    return Alt(*[CodeRange(lo, hi) for (lo, hi) in pairs])
+
def CodeRange(code1, code2):
    """
    CodeRange(code1, code2) is an RE which matches any character
    with a code |c| in the range |code1| <= |c| < |code2|.
    """
    if not (code1 <= nl_code < code2):
        return RawCodeRange(code1, code2)
    # Newline needs special treatment: split the range around it and
    # route '\n' through the shared RawNewline instance.
    return Alt(RawCodeRange(code1, nl_code),
               RawNewline,
               RawCodeRange(nl_code + 1, code2))
+
+#
+# Abstract classes
+#
+
class RE:
    """RE is the base class for regular expression constructors.
    The following operators are defined on REs:

         re1 + re2     is an RE which matches |re1| followed by |re2|
         re1 | re2     is an RE which matches either |re1| or |re2|
    """

    nullable = 1  # True if this RE can match 0 input symbols
    match_nl = 1  # True if this RE can match a string ending with '\n'
    str = None    # Set to a string to override the class's __str__ result

    def build_machine(self, machine, initial_state, final_state,
                      match_bol, nocase):
        """
        This method should add states to |machine| to implement this
        RE, starting at |initial_state| and ending at |final_state|.
        If |match_bol| is true, the RE must be able to match at the
        beginning of a line. If nocase is true, upper and lower case
        letters should be treated as equivalent. Subclasses must override.
        """
        # BUG FIX: previously raised exceptions.UnimplementedMethod, but
        # no 'exceptions' name is imported in this module, so a subclass
        # missing build_machine died with a NameError instead of a
        # meaningful diagnostic.
        raise NotImplementedError("%s.build_machine not implemented" %
            self.__class__.__name__)

    def build_opt(self, m, initial_state, c):
        """
        Return a new state of machine |m| reachable from |initial_state|
        on character |c| or epsilon.
        """
        s = m.new_state()
        initial_state.link_to(s)
        initial_state.add_transition(c, s)
        return s

    def __add__(self, other):
        return Seq(self, other)

    def __or__(self, other):
        return Alt(self, other)

    def __str__(self):
        if self.str:
            return self.str
        else:
            return self.calc_str()

    def check_re(self, num, value):
        # Validate that constructor argument |num| is an RE.
        if not isinstance(value, RE):
            self.wrong_type(num, value, "Plex.RE instance")

    def check_string(self, num, value):
        if type(value) != type(''):
            self.wrong_type(num, value, "string")

    def check_char(self, num, value):
        self.check_string(num, value)
        if len(value) != 1:
            raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s."
                "Expected a string of length 1, got: %s" % (
                num, self.__class__.__name__, repr(value)))

    def wrong_type(self, num, value, expected):
        """Raise a PlexTypeError describing a bad constructor argument."""
        if type(value) == types.InstanceType:
            got = "%s.%s instance" % (
                value.__class__.__module__, value.__class__.__name__)
        else:
            got = type(value).__name__
        # BUG FIX: the format string was missing its closing parenthesis.
        raise Errors.PlexTypeError("Invalid type for argument %d of Plex.%s "
            "(expected %s, got %s)" % (
            num, self.__class__.__name__, expected, got))
+
+#
+# Primitive RE constructors
+# -------------------------
+#
+# These are the basic REs from which all others are built.
+#
+
+## class Char(RE):
+## """
+## Char(c) is an RE which matches the character |c|.
+## """
+
+## nullable = 0
+
+## def __init__(self, char):
+## self.char = char
+## self.match_nl = char == '\n'
+
+## def build_machine(self, m, initial_state, final_state, match_bol, nocase):
+## c = self.char
+## if match_bol and c <> BOL:
+## s1 = self.build_opt(m, initial_state, BOL)
+## else:
+## s1 = initial_state
+## if c == '\n' or c == EOF:
+## s1 = self.build_opt(m, s1, EOL)
+## if len(c) == 1:
+## code = ord(self.char)
+## s1.add_transition((code, code+1), final_state)
+## if nocase and is_letter_code(code):
+## code2 = other_case_code(code)
+## s1.add_transition((code2, code2+1), final_state)
+## else:
+## s1.add_transition(c, final_state)
+
+## def calc_str(self):
+## return "Char(%s)" % repr(self.char)
+
def Char(c):
    """
    Char(c) is an RE which matches the character |c|.
    """
    if len(c) != 1:
        # Multi-character names are special symbols (BOL/EOL/EOF).
        result = SpecialSymbol(c)
    else:
        result = CodeRange(ord(c), ord(c) + 1)
    result.str = "Char(%s)" % repr(c)
    return result
+
class RawCodeRange(RE):
    """
    RawCodeRange(code1, code2) is a low-level RE which matches any character
    with a code |c| in the range |code1| <= |c| < |code2|, where the range
    does not include newline. For internal use only.
    """
    nullable = 0
    match_nl = 0
    range = None            # (code, code)
    uppercase_range = None  # (code, code) or None
    lowercase_range = None  # (code, code) or None

    def __init__(self, code1, code2):
        self.range = (code1, code2)
        # NOTE: these assignments shadow the module-level helper functions
        # of the same names on the instance; the right-hand sides still
        # resolve to the module-level functions.
        self.uppercase_range = uppercase_range(code1, code2)
        self.lowercase_range = lowercase_range(code1, code2)

    def build_machine(self, m, initial_state, final_state, match_bol, nocase):
        if match_bol:
            initial_state = self.build_opt(m, initial_state, BOL)
        initial_state.add_transition(self.range, final_state)
        if nocase:
            # Also accept the other-case images of the range, if any.
            if self.uppercase_range:
                initial_state.add_transition(self.uppercase_range, final_state)
            if self.lowercase_range:
                initial_state.add_transition(self.lowercase_range, final_state)

    def calc_str(self):
        # BUG FIX: previously formatted self.code1/self.code2, which are
        # never assigned anywhere, so str() of an unnamed RawCodeRange
        # raised AttributeError. Use the stored range tuple instead.
        return "CodeRange(%d,%d)" % self.range
+
class _RawNewline(RE):
    """
    RawNewline is a low-level RE which matches a newline character.
    For internal use only.
    """
    nullable = 0
    match_nl = 1

    def build_machine(self, m, initial_state, final_state, match_bol, nocase):
        start = initial_state
        if match_bol:
            start = self.build_opt(m, start, BOL)
        # A newline is modelled as an optional EOL symbol followed by '\n'.
        eol_state = self.build_opt(m, start, EOL)
        eol_state.add_transition((nl_code, nl_code + 1), final_state)


# Newline matching is stateless, so a single shared instance suffices.
RawNewline = _RawNewline()
+
+
class SpecialSymbol(RE):
    """
    SpecialSymbol(sym) is an RE which matches the special input
    symbol |sym|, which is one of BOL, EOL or EOF.
    """
    nullable = 0
    match_nl = 0
    sym = None

    def __init__(self, sym):
        self.sym = sym

    def build_machine(self, m, initial_state, final_state, match_bol, nocase):
        # Sequences 'bol bol' and 'bol eof' are impossible, so a
        # preceding BOL only needs allowing for when sym is EOL.
        start = initial_state
        if match_bol and self.sym == EOL:
            start = self.build_opt(m, start, BOL)
        start.add_transition(self.sym, final_state)
+
+
class Seq(RE):
    """Seq(re1, re2, re3...) is an RE which matches |re1| followed by
    |re2| followed by |re3|..."""

    def __init__(self, *re_list):
        self.re_list = re_list
        # The sequence matches empty only if every element does.
        nullable = 1
        for i, re in enumerate(re_list):
            self.check_re(i, re)
            nullable = nullable and re.nullable
        self.nullable = nullable
        # The sequence can end with a newline if, scanning from the end,
        # some element matches '\n' before a non-nullable one intervenes.
        match_nl = 0
        for re in reversed(re_list):
            if re.match_nl:
                match_nl = 1
                break
            if not re.nullable:
                break
        self.match_nl = match_nl

    def build_machine(self, m, initial_state, final_state, match_bol, nocase):
        re_list = self.re_list
        if not re_list:
            initial_state.link_to(final_state)
            return
        last = len(re_list) - 1
        src = initial_state
        for i, re in enumerate(re_list):
            dest = final_state if i == last else m.new_state()
            re.build_machine(m, src, dest, match_bol, nocase)
            src = dest
            # Subsequent elements start at BOL only if this element can
            # end a line, or was skipped with the BOL obligation pending.
            match_bol = re.match_nl or (match_bol and re.nullable)

    def calc_str(self):
        return "Seq(%s)" % ",".join(map(str, self.re_list))
+
+
class Alt(RE):
    """Alt(re1, re2, re3...) is an RE which matches either |re1| or
    |re2| or |re3|..."""

    def __init__(self, *re_list):
        self.re_list = re_list
        nullable_res = []
        non_nullable_res = []
        match_nl = 0
        # Partition alternatives by nullability (argument numbers are
        # 1-based in error messages).
        for pos, re in enumerate(re_list):
            self.check_re(pos + 1, re)
            if re.nullable:
                nullable_res.append(re)
            else:
                non_nullable_res.append(re)
            if re.match_nl:
                match_nl = 1
        self.nullable_res = nullable_res
        self.non_nullable_res = non_nullable_res
        self.nullable = 1 if nullable_res else 0
        self.match_nl = match_nl

    def build_machine(self, m, initial_state, final_state, match_bol, nocase):
        # Nullable alternatives keep the BOL obligation themselves; the
        # remainder share a single post-BOL state.
        for re in self.nullable_res:
            re.build_machine(m, initial_state, final_state, match_bol, nocase)
        if self.non_nullable_res:
            if match_bol:
                initial_state = self.build_opt(m, initial_state, BOL)
            for re in self.non_nullable_res:
                re.build_machine(m, initial_state, final_state, 0, nocase)

    def calc_str(self):
        return "Alt(%s)" % ",".join(map(str, self.re_list))
+
+
class Rep1(RE):
    """Rep1(re) is an RE which matches one or more repetitions of |re|."""

    def __init__(self, re):
        self.check_re(1, re)
        self.re = re
        self.nullable = re.nullable
        self.match_nl = re.match_nl

    def build_machine(self, m, initial_state, final_state, match_bol, nocase):
        # Loop between two fresh states; after the first pass a new
        # iteration may start at BOL if the body can end a line.
        loop_start = m.new_state()
        loop_end = m.new_state()
        initial_state.link_to(loop_start)
        self.re.build_machine(m, loop_start, loop_end,
                              match_bol or self.re.match_nl, nocase)
        loop_end.link_to(loop_start)
        loop_end.link_to(final_state)

    def calc_str(self):
        return "Rep1(%s)" % self.re
+
+
class SwitchCase(RE):
    """
    SwitchCase(re, nocase) is an RE which matches the same strings as RE,
    but treating upper and lower case letters according to |nocase|. If
    |nocase| is true, case is ignored, otherwise it is not.
    """
    re = None
    nocase = None

    def __init__(self, re, nocase):
        self.re = re
        self.nocase = nocase
        self.nullable = re.nullable
        self.match_nl = re.match_nl

    def build_machine(self, m, initial_state, final_state, match_bol, nocase):
        # Delegate to the wrapped RE, overriding the inherited case flag.
        self.re.build_machine(m, initial_state, final_state, match_bol,
                              self.nocase)

    def calc_str(self):
        name = "NoCase" if self.nocase else "Case"
        return "%s(%s)" % (name, self.re)
+
+#
+# Composite RE constructors
+# -------------------------
+#
+# These REs are defined in terms of the primitive REs.
+#
+
# A zero-element sequence matches exactly the empty string.
Empty = Seq()
Empty.__doc__ = \
    """
    Empty is an RE which matches the empty string.
    """
Empty.str = "Empty"
+
def Str1(s):
    """
    Str1(s) is an RE which matches the literal string |s|.
    """
    result = Seq(*[Char(c) for c in s])
    result.str = "Str(%s)" % repr(s)
    return result
+
def Str(*strs):
    """
    Str(s) is an RE which matches the literal string |s|.
    Str(s1, s2, s3, ...) is an RE which matches any of |s1| or |s2| or |s3|...
    """
    if len(strs) == 1:
        return Str1(strs[0])
    result = Alt(*[Str1(s) for s in strs])
    result.str = "Str(%s)" % ",".join(map(repr, strs))
    return result
+
def Any(s):
    """
    Any(s) is an RE which matches any character in the string |s|.
    """
    ranges = chars_to_ranges(s)
    result = CodeRanges(ranges)
    result.str = "Any(%s)" % repr(s)
    return result
+
def AnyBut(s):
    """
    AnyBut(s) is an RE which matches any character (including
    newline) which is not in the string |s|.
    """
    # Complement trick: bracketing the excluded ranges with -maxint and
    # maxint flips the flat endpoint list to describe everything outside.
    ranges = [-maxint] + chars_to_ranges(s) + [maxint]
    result = CodeRanges(ranges)
    result.str = "AnyBut(%s)" % repr(s)
    return result
+
# The complement of the empty set: matches every character.
AnyChar = AnyBut("")
AnyChar.__doc__ = \
    """
    AnyChar is an RE which matches any single character (including a newline).
    """
AnyChar.str = "AnyChar"
+
def Range(s1, s2 = None):
    """
    Range(c1, c2) is an RE which matches any single character in the range
    |c1| to |c2| inclusive.
    Range(s) where |s| is a string of even length is an RE which matches
    any single character in the ranges |s[0]| to |s[1]|, |s[2]| to |s[3]|,...
    """
    if s2:
        result = CodeRange(ord(s1), ord(s2) + 1)
        result.str = "Range(%s,%s)" % (s1, s2)
        return result
    # Pair up successive characters of s1 as inclusive range endpoints.
    pairs = zip(s1[::2], s1[1::2])
    result = Alt(*[CodeRange(ord(lo), ord(hi) + 1) for (lo, hi) in pairs])
    result.str = "Range(%s)" % repr(s1)
    return result
+
def Opt(re):
    """
    Opt(re) is an RE which matches either |re| or the empty string.
    """
    result = re | Empty
    result.str = "Opt(%s)" % re
    return result
+
def Rep(re):
    """
    Rep(re) is an RE which matches zero or more repetitions of |re|.
    """
    one_or_more = Rep1(re)
    result = Opt(one_or_more)
    result.str = "Rep(%s)" % re
    return result
+
def NoCase(re):
    """
    NoCase(re) is an RE which matches the same strings as |re|, but
    treating upper and lower case letters as equivalent.
    """
    return SwitchCase(re, nocase=1)
+
def Case(re):
    """
    Case(re) is an RE which matches the same strings as |re|, but
    treating upper and lower case letters as distinct, i.e. it cancels
    the effect of any enclosing NoCase().
    """
    return SwitchCase(re, nocase=0)
+
+#
+# RE Constants
+#
+
# Predefined REs for the three special input symbols.
Bol = Char(BOL)
Bol.__doc__ = \
    """
    Bol is an RE which matches the beginning of a line.
    """
Bol.str = "Bol"

Eol = Char(EOL)
Eol.__doc__ = \
    """
    Eol is an RE which matches the end of a line.
    """
Eol.str = "Eol"

Eof = Char(EOF)
Eof.__doc__ = \
    """
    Eof is an RE which matches the end of the file.
    """
Eof.str = "Eof"
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+#
+# Scanning an input stream
+#
+#=======================================================================
+
+import Errors
+from Regexps import BOL, EOL, EOF
+
class Scanner:
    """
    A Scanner is used to read tokens from a stream of characters
    using the token set specified by a Plex.Lexicon.

    Constructor:

      Scanner(lexicon, stream, name = '')

        See the docstring of the __init__ method for details.

    Methods:

        See the docstrings of the individual methods for more
        information.

        read() --> (value, text)
            Reads the next lexical token from the stream.

        position() --> (name, line, col)
            Returns the position of the last token read using the
            read() method.

        begin(state_name)
            Causes scanner to change state.

        produce(value [, text])
            Causes return of a token value to the caller of the
            Scanner.

    """

    lexicon = None        # Lexicon
    stream = None         # file-like object
    name = ''
    buffer = ''
    buf_start_pos = 0     # position in input of start of buffer
    next_pos = 0          # position in input of next char to read
    cur_pos = 0           # position in input of current char
    cur_line = 1          # line number of current char
    cur_line_start = 0    # position in input of start of current line
    start_pos = 0         # position in input of start of token
    start_line = 0        # line number of start of token
    start_col = 0         # position in line of start of token
    text = None           # text of last token read
    initial_state = None  # Node
    state_name = ''       # Name of initial state
    queue = None          # list of tokens to be returned
    trace = 0             # nonzero enables debug printing
+
    def __init__(self, lexicon, stream, name = ''):
        """
        Scanner(lexicon, stream, name = '')

        |lexicon| is a Plex.Lexicon instance specifying the lexical tokens
        to be recognised.

        |stream| can be a file object or anything which implements a
        compatible read() method.

        |name| is optional, and may be the name of the file being
        scanned or any other identifying string.
        """
        self.lexicon = lexicon
        self.stream = stream
        self.name = name
        self.queue = []
        self.initial_state = None
        # Select the default ('') scanner state; requires self.lexicon set.
        self.begin('')
        self.next_pos = 0
        self.cur_pos = 0
        self.cur_line_start = 0
        self.cur_char = BOL     # scanning starts at beginning-of-line
        self.input_state = 1    # see next_char() for the state encoding
+
    def read(self):
        """
        Read the next lexical token from the stream and return a
        tuple (value, text), where |value| is the value associated with
        the token as specified by the Lexicon, and |text| is the actual
        string read from the stream. Returns (None, '') on end of file.
        """
        queue = self.queue
        # Keep scanning until an action produces at least one token;
        # actions returning None produce nothing and scanning continues.
        while not queue:
            self.text, action = self.scan_a_token()
            if action is None:
                self.produce(None)
                self.eof()
            else:
                value = action.perform(self, self.text)
                if value is not None:
                    self.produce(value)
        result = queue[0]
        del queue[0]
        return result
+
    def scan_a_token(self):
        """
        Read the next input sequence recognised by the machine
        and return (text, action). Returns ('', None) on end of
        file.
        """
        self.start_pos = self.cur_pos
        self.start_line = self.cur_line
        self.start_col = self.cur_pos - self.cur_line_start
#        if self.trace:
#            action = self.run_machine()
#        else:
#            action = self.run_machine_inlined()
        action = self.run_machine_inlined()
        if action:
            if self.trace:
                print "Scanner: read: Performing", action, "%d:%d" % (
                    self.start_pos, self.cur_pos)
            # Slice the matched text out of the buffer window.
            base = self.buf_start_pos
            text = self.buffer[self.start_pos - base : self.cur_pos - base]
            return (text, action)
        else:
            if self.cur_pos == self.start_pos:
                # Nothing consumed: either we are at EOF, or no token
                # matches here at all.
                if self.cur_char == EOL:
                    self.next_char()
                if not self.cur_char or self.cur_char == EOF:
                    return ('', None)
            raise Errors.UnrecognizedInput(self, self.state_name)
+
    def run_machine(self):
        """
        Run the machine until no more transitions are possible.

        NOTE(review): self.transition() and self.back_up() are commented
        out below, so this non-inlined path would raise AttributeError if
        used; only run_machine_inlined() is actually called.
        """
        self.state = self.initial_state
        self.backup_state = None
        while self.transition():
            pass
        return self.back_up()
+
    def run_machine_inlined(self):
        """
        Inlined version of run_machine for speed.

        input_state encodes progress through a newline:
          1 = normal scanning, 2 = '\\n' seen (deliver EOL first),
          3 = deliver the '\\n' itself, 4 = end of data (deliver EOL),
          5 = EOF delivered.
        """
        # Copy hot instance attributes into locals for fast access.
        state = self.initial_state
        cur_pos = self.cur_pos
        cur_line = self.cur_line
        cur_line_start = self.cur_line_start
        cur_char = self.cur_char
        input_state = self.input_state
        next_pos = self.next_pos
        buffer = self.buffer
        buf_start_pos = self.buf_start_pos
        buf_len = len(buffer)
        backup_state = None
        trace = self.trace
        while 1:
            if trace: #TRACE#
                print "State %d, %d/%d:%s -->" % ( #TRACE#
                    state['number'], input_state, cur_pos, repr(cur_char)), #TRACE#
            # Begin inlined self.save_for_backup()
            # Remember the last accepting state seen, so we can back up
            # to it if the machine later blocks.
            #action = state.action #@slow
            action = state['action'] #@fast
            if action:
                backup_state = (
                    action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos)
            # End inlined self.save_for_backup()
            c = cur_char
            #new_state = state.new_state(c) #@slow
            new_state = state.get(c, -1) #@fast
            if new_state == -1: #@fast
                # Fall back to the 'else' transition (never at end of input).
                new_state = c and state.get('else') #@fast
            if new_state:
                if trace: #TRACE#
                    print "State %d" % new_state['number'] #TRACE#
                state = new_state
                # Begin inlined: self.next_char()
                if input_state == 1:
                    cur_pos = next_pos
                    # Begin inlined: c = self.read_char()
                    buf_index = next_pos - buf_start_pos
                    if buf_index < buf_len:
                        c = buffer[buf_index]
                        next_pos = next_pos + 1
                    else:
                        # Refill: discard everything before the current
                        # token start and append a new chunk.
                        discard = self.start_pos - buf_start_pos
                        data = self.stream.read(0x1000)
                        buffer = self.buffer[discard:] + data
                        self.buffer = buffer
                        buf_start_pos = buf_start_pos + discard
                        self.buf_start_pos = buf_start_pos
                        buf_len = len(buffer)
                        buf_index = buf_index - discard
                        if data:
                            c = buffer[buf_index]
                            next_pos = next_pos + 1
                        else:
                            c = ''
                    # End inlined: c = self.read_char()
                    if c == '\n':
                        cur_char = EOL
                        input_state = 2
                    elif not c:
                        cur_char = EOL
                        input_state = 4
                    else:
                        cur_char = c
                elif input_state == 2:
                    cur_char = '\n'
                    input_state = 3
                elif input_state == 3:
                    cur_line = cur_line + 1
                    cur_line_start = cur_pos = next_pos
                    cur_char = BOL
                    input_state = 1
                elif input_state == 4:
                    cur_char = EOF
                    input_state = 5
                else: # input_state = 5
                    cur_char = ''
                # End inlined self.next_char()
            else: # not new_state
                if trace: #TRACE#
                    print "blocked" #TRACE#
                # Begin inlined: action = self.back_up()
                # Machine blocked: rewind to the last accepting state, if any.
                if backup_state:
                    (action, cur_pos, cur_line, cur_line_start,
                        cur_char, input_state, next_pos) = backup_state
                else:
                    action = None
                break # while 1
                # End inlined: action = self.back_up()
        # Write the locals back to the instance.
        self.cur_pos = cur_pos
        self.cur_line = cur_line
        self.cur_line_start = cur_line_start
        self.cur_char = cur_char
        self.input_state = input_state
        self.next_pos = next_pos
        if trace: #TRACE#
            if action: #TRACE#
                print "Doing", action #TRACE#
        return action
+
+# def transition(self):
+# self.save_for_backup()
+# c = self.cur_char
+# new_state = self.state.new_state(c)
+# if new_state:
+# if self.trace:
+# print "Scanner: read: State %d: %s --> State %d" % (
+# self.state.number, repr(c), new_state.number)
+# self.state = new_state
+# self.next_char()
+# return 1
+# else:
+# if self.trace:
+# print "Scanner: read: State %d: %s --> blocked" % (
+# self.state.number, repr(c))
+# return 0
+
+# def save_for_backup(self):
+# action = self.state.get_action()
+# if action:
+# if self.trace:
+# print "Scanner: read: Saving backup point at", self.cur_pos
+# self.backup_state = (
+# action, self.cur_pos, self.cur_line, self.cur_line_start,
+# self.cur_char, self.input_state, self.next_pos)
+
+# def back_up(self):
+# backup_state = self.backup_state
+# if backup_state:
+# (action, self.cur_pos, self.cur_line, self.cur_line_start,
+# self.cur_char, self.input_state, self.next_pos) = backup_state
+# if self.trace:
+# print "Scanner: read: Backing up to", self.cur_pos
+# return action
+# else:
+# return None
+
    def next_char(self):
        """
        Advance to the next input symbol, maintaining line/column
        bookkeeping. See run_machine_inlined() for the meaning of
        input_state values 1-5.

        NOTE(review): the input_state == 1 branch calls self.read_char(),
        which is commented out below; only the inlined scanner path is
        fully live.
        """
        input_state = self.input_state
        if self.trace:
            print "Scanner: next:", " "*20, "[%d] %d" % (input_state, self.cur_pos),
        if input_state == 1:
            self.cur_pos = self.next_pos
            c = self.read_char()
            if c == '\n':
                self.cur_char = EOL
                self.input_state = 2
            elif not c:
                self.cur_char = EOL
                self.input_state = 4
            else:
                self.cur_char = c
        elif input_state == 2:
            self.cur_char = '\n'
            self.input_state = 3
        elif input_state == 3:
            self.cur_line = self.cur_line + 1
            self.cur_line_start = self.cur_pos = self.next_pos
            self.cur_char = BOL
            self.input_state = 1
        elif input_state == 4:
            self.cur_char = EOF
            self.input_state = 5
        else: # input_state = 5
            self.cur_char = ''
        if self.trace:
            print "--> [%d] %d %s" % (input_state, self.cur_pos, repr(self.cur_char))
+
+# def read_char(self):
+# """
+# Get the next input character, filling the buffer if necessary.
+# Returns '' at end of file.
+# """
+# next_pos = self.next_pos
+# buf_index = next_pos - self.buf_start_pos
+# if buf_index == len(self.buffer):
+# discard = self.start_pos - self.buf_start_pos
+# data = self.stream.read(0x1000)
+# self.buffer = self.buffer[discard:] + data
+# self.buf_start_pos = self.buf_start_pos + discard
+# buf_index = buf_index - discard
+# if not data:
+# return ''
+# c = self.buffer[buf_index]
+# self.next_pos = next_pos + 1
+# return c
+
+ def position(self):
+ """
+ Return a tuple (name, line, col) representing the location of
+ the last token read using the read() method. |name| is the
+ name that was provided to the Scanner constructor; |line|
+ is the line number in the stream (1-based); |col| is the
+ position within the line of the first character of the token
+ (0-based).
+ """
+ return (self.name, self.start_line, self.start_col)
+
+ def begin(self, state_name):
+ """Set the current state of the scanner to the named state."""
+ self.initial_state = (
+ self.lexicon.get_initial_state(state_name))
+ self.state_name = state_name
+
+ def produce(self, value, text = None):
+ """
+ Called from an action procedure, causes |value| to be returned
+ as the token value from read(). If |text| is supplied, it is
+ returned in place of the scanned text.
+
+ produce() can be called more than once during a single call to an action
+ procedure, in which case the tokens are queued up and returned one
+ at a time by subsequent calls to read(), until the queue is empty,
+ whereupon scanning resumes.
+ """
+ if text is None:
+ text = self.text
+ self.queue.append((value, text))
+
+ def eof(self):
+ """
+ Override this method if you want something to be done at
+ end of file.
+ """
+
# For backward compatibility:
# 'yield' is a reserved word in Python, so the alias for produce()
# cannot be written as an ordinary 'def'; it is attached via setattr().
setattr(Scanner, "yield", Scanner.produce)
--- /dev/null
+#
+# Get time in platform-dependent way
+#
+
import os
from sys import platform, exit, stderr

# Pick a timing function appropriate for the platform.  time() returns
# seconds as a float; timekind says what kind of time it measures.
if platform == 'mac':
    import MacOS
    def time():
        """Elapsed real time in seconds (Mac tick counter, 60 ticks/sec)."""
        return MacOS.GetTicks() / 60.0
    timekind = "real"
elif hasattr(os, 'times'):
    def time():
        """CPU time (user + system) consumed by this process, in seconds."""
        user, system = os.times()[:2]
        return user + system
    timekind = "cpu"
else:
    stderr.write(
        "Don't know how to get time on platform %s\n" % repr(platform))
    exit(1)
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+# Traditional Regular Expression Syntax
+#
+#=======================================================================
+
+from Regexps import *
+from Errors import PlexError
+
class RegexpSyntaxError(PlexError):
    # Raised by REParser.error() when a traditional regexp string
    # passed to re() is malformed.
    pass
+
def re(s):
    """
    Convert traditional string representation of regular expression |s|
    into Plex representation.
    """
    parser = REParser(s)
    return parser.parse_re()
+
class REParser:
    """
    Recursive-descent parser that turns a traditional regexp string
    into a Plex regexp tree (via the Alt/Seq/Rep/... constructors
    from the Regexps module).

    Scanning state: |s| is the input string, |i| the index of the
    current character |c|, and |end| is true once input is exhausted.
    """

    def __init__(self, s):
        self.s = s
        self.i = -1     # index of current char; -1 before first next()
        self.end = 0    # true when the input string is exhausted
        self.next()

    def parse_re(self):
        """Parse the whole string; complain about trailing characters."""
        re = self.parse_alt()
        if not self.end:
            self.error("Unexpected %s" % repr(self.c))
        return re

    def parse_alt(self):
        """Parse a set of alternative regexps."""
        re = self.parse_seq()
        if self.c == '|':
            re_list = [re]
            while self.c == '|':
                self.next()
                re_list.append(self.parse_seq())
            # Alt(*re_list) instead of the removed apply() builtin.
            re = Alt(*re_list)
        return re

    def parse_seq(self):
        """Parse a sequence of regexps."""
        re_list = []
        while not self.end and not self.c in "|)":
            re_list.append(self.parse_mod())
        return Seq(*re_list)

    def parse_mod(self):
        """Parse a primitive regexp followed by *, +, ? modifiers."""
        re = self.parse_prim()
        while not self.end and self.c in "*+?":
            if self.c == '*':
                re = Rep(re)
            elif self.c == '+':
                re = Rep1(re)
            else: # self.c == '?'
                re = Opt(re)
            self.next()
        return re

    def parse_prim(self):
        """Parse a primitive regexp: ., ^, $, (...), [...] or a char."""
        c = self.get()
        if c == '.':
            re = AnyBut("\n")
        elif c == '^':
            re = Bol
        elif c == '$':
            re = Eol
        elif c == '(':
            re = self.parse_alt()
            self.expect(')')
        elif c == '[':
            re = self.parse_charset()
            self.expect(']')
        else:
            # Backslash escapes the following character.
            if c == '\\':
                c = self.get()
            re = Char(c)
        return re

    def parse_charset(self):
        """Parse a charset. Does not include the surrounding []."""
        char_list = []
        invert = 0
        if self.c == '^':
            invert = 1
            self.next()
        # A ']' immediately after '[' (or '[^') is a literal ']'.
        if self.c == ']':
            char_list.append(']')
            self.next()
        while not self.end and self.c != ']':
            c1 = self.get()
            # 'a-z' is a range unless the '-' is the last char before ']'.
            if self.c == '-' and self.lookahead(1) != ']':
                self.next()
                c2 = self.get()
                for a in range(ord(c1), ord(c2) + 1):
                    char_list.append(chr(a))
            else:
                char_list.append(c1)
        # str.join: the old string.join() relied on the |string| module,
        # which this module never imports.
        chars = "".join(char_list)
        if invert:
            return AnyBut(chars)
        else:
            return Any(chars)

    def next(self):
        """Advance to the next char, setting |end| past the last one."""
        s = self.s
        i = self.i = self.i + 1
        if i < len(s):
            self.c = s[i]
        else:
            self.c = ''
            self.end = 1

    def get(self):
        """Return the current char and advance; error at end of input."""
        if self.end:
            self.error("Premature end of string")
        c = self.c
        self.next()
        return c

    def lookahead(self, n):
        """Look ahead n chars; '' past the end of the string."""
        j = self.i + n
        if j < len(self.s):
            return self.s[j]
        else:
            return ''

    def expect(self, c):
        """
        Expect to find character |c| at current position.
        Raises an exception otherwise.
        """
        if self.c == c:
            self.next()
        else:
            self.error("Missing %s" % repr(c))

    def error(self, mess):
        """Raise exception to signal syntax error in regexp."""
        raise RegexpSyntaxError("Syntax error in regexp %s at position %d: %s" % (
            repr(self.s), self.i, mess))
+
+
+
--- /dev/null
+#
+# Plex - Transition Maps
+#
+# This version represents state sets direcly as dicts
+# for speed.
+#
+
+from copy import copy
+import string
+from sys import maxint
+from types import TupleType
+
class TransitionMap:
    """
    A TransitionMap maps an input event to a set of states.
    An input event is one of: a range of character codes,
    the empty string (representing an epsilon move), or one
    of the special symbols BOL, EOL, EOF.

    For characters, this implementation compactly represents
    the map by means of a list:

        [code_0, states_0, code_1, states_1, code_2, states_2,
         ..., code_n-1, states_n-1, code_n]

    where |code_i| is a character code, and |states_i| is a
    set of states corresponding to characters with codes |c|
    in the range |code_i| <= |c| <= |code_i+1|.

    The following invariants hold:
        n >= 1
        code_0 == -maxint
        code_n == maxint
        code_i < code_i+1 for i in 0..n-1
        states_0 == states_n-1

    Mappings for the special events '', BOL, EOL, EOF are
    kept separately in a dictionary.

    State sets are represented directly as dicts mapping each
    state to 1 (see the module header comment).
    """

    map = None # The list of codes and states
    special = None # Mapping for special events

    def __init__(self, map = None, special = None):
        # A fresh map covers the entire code range with one entry
        # mapping everything to the empty state set.
        if not map:
            map = [-maxint, {}, maxint]
        if not special:
            special = {}
        self.map = map
        self.special = special
        #self.check() ###

    def add(self, event, new_state,
            TupleType = TupleType):
        """
        Add transition to |new_state| on |event|.
        """
        # TupleType is bound as a default argument so the lookup is a
        # fast local inside the method (old CPython speed idiom).
        if type(event) == TupleType:
            # Character range: ensure split points exist at both ends,
            # then add the state to every sub-range in between.
            code0, code1 = event
            i = self.split(code0)
            j = self.split(code1)
            map = self.map
            while i < j:
                map[i + 1][new_state] = 1
                i = i + 2
        else:
            # Special event ('', BOL, EOL, EOF).
            self.get_special(event)[new_state] = 1

    def add_set(self, event, new_set,
            TupleType = TupleType):
        """
        Add transitions to the states in |new_set| on |event|.
        """
        # Same structure as add(), but merges a whole state set at once.
        if type(event) == TupleType:
            code0, code1 = event
            i = self.split(code0)
            j = self.split(code1)
            map = self.map
            while i < j:
                map[i + 1].update(new_set)
                i = i + 2
        else:
            self.get_special(event).update(new_set)

    def get_epsilon(self,
            none = None):
        """
        Return the mapping for epsilon, or None.
        """
        # Epsilon moves are stored under the empty-string key.
        return self.special.get('', none)

    def items(self,
            len = len):
        """
        Return the mapping as a list of ((code1, code2), state_set) and
        (special_event, state_set) pairs.
        """
        result = []
        map = self.map
        # else_set is states_0; by the class invariant it also equals
        # the final state set.
        else_set = map[1]
        i = 0
        n = len(map) - 1
        code0 = map[0]
        while i < n:
            set = map[i + 1]
            code1 = map[i + 2]
            # Report a range when its own set, or the default set,
            # is non-empty.
            if set or else_set:
                result.append(((code0, code1), set))
            code0 = code1
            i = i + 2
        for event, set in self.special.items():
            if set:
                result.append((event, set))
        return result

    # ------------------- Private methods --------------------

    def split(self, code,
            len = len, maxint = maxint):
        """
        Search the list for the position of the split point for |code|,
        inserting a new split point if necessary. Returns index |i| such
        that |code| == |map[i]|.
        """
        # We use a funky variation on binary search.
        # Codes live at even indices, state sets at odd indices.
        map = self.map
        hi = len(map) - 1
        # Special case: code == map[-1]
        if code == maxint:
            return hi
        # General case
        lo = 0
        # loop invariant: map[lo] <= code < map[hi] and hi - lo >= 2
        while hi - lo >= 4:
            # Find midpoint truncated to even index
            mid = ((lo + hi) / 2) & ~1
            if code < map[mid]:
                hi = mid
            else:
                lo = mid
        # map[lo] <= code < map[hi] and hi - lo == 2
        if map[lo] == code:
            return lo
        else:
            # Split the range at |code|; the new sub-range gets a copy
            # of the state set of the range being split.
            map[hi:hi] = [code, map[hi - 1].copy()]
            #self.check() ###
            return hi

    def get_special(self, event):
        """
        Get state set for special event, adding a new entry if necessary.
        """
        special = self.special
        set = special.get(event, None)
        if not set:
            set = {}
            special[event] = set
        return set

    # --------------------- Conversion methods -----------------------

    def __str__(self):
        # Render as "[code,set,code,...]+{special: set, ...}" with the
        # sentinel codes shown as -inf/inf.
        map_strs = []
        map = self.map
        n = len(map)
        i = 0
        while i < n:
            code = map[i]
            if code == -maxint:
                code_str = "-inf"
            elif code == maxint:
                code_str = "inf"
            else:
                code_str = str(code)
            map_strs.append(code_str)
            i = i + 1
            if i < n:
                map_strs.append(state_set_str(map[i]))
                i = i + 1
        special_strs = {}
        for event, set in self.special.items():
            special_strs[event] = state_set_str(set)
        return "[%s]+%s" % (
            string.join(map_strs, ","),
            special_strs
        )

    # --------------------- Debugging methods -----------------------

    def check(self):
        """Check data structure integrity."""
        # The last two codes must be strictly increasing; anything
        # else means the map list has been corrupted.
        if not self.map[-3] < self.map[-1]:
            print self
            assert 0

    def dump(self, file):
        # Write a human-readable listing of all transitions to |file|.
        map = self.map
        i = 0
        n = len(map) - 1
        while i < n:
            self.dump_range(map[i], map[i + 2], map[i + 1], file)
            i = i + 2
        for event, set in self.special.items():
            if set:
                if not event:
                    event = 'empty'
                self.dump_trans(event, set, file)

    def dump_range(self, code0, code1, set, file):
        # Describe the code range [code0, code1) in the most compact
        # readable form (any, < c, > c, single char, or c1..c2).
        if set:
            if code0 == -maxint:
                if code1 == maxint:
                    k = "any"
                else:
                    k = "< %s" % self.dump_char(code1)
            elif code1 == maxint:
                k = "> %s" % self.dump_char(code0 - 1)
            elif code0 == code1 - 1:
                k = self.dump_char(code0)
            else:
                k = "%s..%s" % (self.dump_char(code0),
                    self.dump_char(code1 - 1))
            self.dump_trans(k, set, file)

    def dump_char(self, code):
        # Printable form of a single character code.
        if 0 <= code <= 255:
            return repr(chr(code))
        else:
            return "chr(%d)" % code

    def dump_trans(self, key, set, file):
        file.write(" %s --> %s\n" % (key, self.dump_set(set)))

    def dump_set(self, set):
        return state_set_str(set)
+
+#
+# State set manipulation functions
+#
+
+#def merge_state_sets(set1, set2):
+# for state in set2.keys():
+# set1[state] = 1
+
def state_set_str(set):
    """
    Format a state set (a dict keyed by states) as "[S1,S2,...]",
    using each state's |number| attribute.
    """
    # str.join replaces the deprecated string.join() (removed in
    # Python 3); behavior is identical.
    str_list = []
    for state in set.keys():
        str_list.append("S%d" % state.number)
    return "[%s]" % ",".join(str_list)
+
+
+
--- /dev/null
+#=======================================================================
+#
+# Python Lexical Analyser
+#
+#=======================================================================
+
+"""
+The Plex module provides lexical analysers with similar capabilities
+to GNU Flex. The following classes and functions are exported;
+see the attached docstrings for more information.
+
+ Scanner For scanning a character stream under the
+ direction of a Lexicon.
+
+ Lexicon For constructing a lexical definition
+ to be used by a Scanner.
+
  Str, Any, AnyBut, AnyChar, Seq, Alt, Opt, Rep, Rep1,
  Bol, Eol, Eof, Empty, Range, Case, NoCase
+
+ Regular expression constructors, for building pattern
+ definitions for a Lexicon.
+
+ State For defining scanner states when creating a
+ Lexicon.
+
+ TEXT, IGNORE, Begin
+
+ Actions for associating with patterns when
+ creating a Lexicon.
+"""
+
+from Actions import TEXT, IGNORE, Begin
+from Lexicons import Lexicon, State
+from Regexps import RE, Seq, Alt, Rep1, Empty, Str, Any, AnyBut, AnyChar, Range
+from Regexps import Opt, Rep, Bol, Eol, Eof, Case, NoCase
+from Scanners import Scanner
+
+
+
--- /dev/null
# Ad-hoc manual test script for TransitionMap: prints the map after
# each insertion so the output can be inspected by eye.
import sys
sys.stderr = sys.stdout  # interleave tracebacks with normal output

from TransitionMaps import TransitionMap

m = TransitionMap()
print m

def add(c, s):
    # NOTE(review): the TransitionMap class in this tree defines add()
    # and add_set(), but no add_transition() or keys(); this script
    # looks stale relative to that API -- confirm before running.
    print
    print "adding", repr(c), "-->", repr(s)
    m.add_transition(c, s)
    print m
    print "keys:", m.keys()

add('a','alpha')
add('e', 'eta')
add('f', 'foo')
add('i', 'iota')
add('i', 'imp')
add('eol', 'elephant')
+
--- /dev/null
+#
+# Pyrex -- Things that don't belong
+# anywhere else in particular
+#
+
+import os, sys
+
def replace_suffix(path, newsuf):
    """Return |path| with its file extension replaced by |newsuf|."""
    return os.path.splitext(path)[0] + newsuf
+
def open_new_file(path):
    """
    Open |path| for writing.  An existing file is truncated in place
    rather than removed and recreated, which preserves its metadata
    on the Mac.
    """
    return open(path, "w+")
+