author      2016-05-11 08:24:40 +0200
committer   2016-05-11 08:24:40 +0200
commit      49dd28056415ee01ed66ec75db3a234da2ad287a (patch)
tree        50c2c512bb193d9612a60ae218655ba864d50176 /rpython
parent      Backend (diff)
parent      Add comment (diff)
hg merge default
Diffstat (limited to 'rpython')
149 files changed, 2485 insertions, 851 deletions
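
Most of the churn in this merge is mechanical: the old Python 2-only exception and raise syntax is replaced by the forms accepted by both Python 2.6+ and Python 3. The sketch below only illustrates the two recurring patterns; the detect() wrapper is hypothetical, while ProcessorAutodetectError and its message come from the rpython/jit/backend/detect_cpu.py hunk of this diff.

    class ProcessorAutodetectError(Exception):
        pass

    def detect(mach):
        # hypothetical wrapper; only the raise/except style matters here
        if not mach:
            # old form: raise ProcessorAutodetectError, "cannot run 'uname -m'"
            raise ProcessorAutodetectError("cannot run 'uname -m'")
        return mach

    try:
        detect("")
    except ProcessorAutodetectError as e:   # old form: except ProcessorAutodetectError, e:
        print("autodetection failed: %s" % e)

The main functional addition is the compute_bitstrings() pass in rpython/jit/codewriter/effectinfo.py: each field or array descr is assigned an integer ei_index, and every EffectInfo keeps one bitstring per category in which bit ei_index says whether that descr is read or written; descrs that always occur in the same EffectInfos share an index, which keeps the bitstrings short. The toy code below mimics that idea with plain integers; make_bitstring, bitcheck and ToyDescr are simplified stand-ins, not the real rpython.tool.algo.bitstring helpers or descr classes.

    import sys

    def make_bitstring(indices):
        # toy stand-in: pack the given bit indices into an int
        bits = 0
        for i in indices:
            bits |= 1 << i
        return bits

    def bitcheck(bits, index):
        return bool((bits >> index) & 1)

    class ToyDescr(object):
        ei_index = sys.maxsize   # sentinel: "not mentioned by any EffectInfo"

    f1descr, f2descr, unused = ToyDescr(), ToyDescr(), ToyDescr()
    f1descr.ei_index, f2descr.ei_index = 0, 1

    # an EffectInfo-like record that reads f1descr and writes f2descr
    bitstring_readonly_fields = make_bitstring([f1descr.ei_index])
    bitstring_write_fields = make_bitstring([f2descr.ei_index])

    assert bitcheck(bitstring_readonly_fields, f1descr.ei_index)      # f1 is read
    assert not bitcheck(bitstring_readonly_fields, f2descr.ei_index)  # f2 is not read
    assert bitcheck(bitstring_write_fields, f2descr.ei_index)         # f2 is written
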
diff --git a/rpython/annotator/annrpython.py b/rpython/annotator/annrpython.py index 88e3f7e002..343d71ce1a 100644 --- a/rpython/annotator/annrpython.py +++ b/rpython/annotator/annrpython.py @@ -342,10 +342,10 @@ class RPythonAnnotator(object): del self.blocked_blocks[block] try: self.flowin(graph, block) - except BlockedInference, e: + except BlockedInference as e: self.annotated[block] = False # failed, hopefully temporarily self.blocked_blocks[block] = (graph, e.opindex) - except Exception, e: + except Exception as e: # hack for debug tools only if not hasattr(e, '__annotator_block'): setattr(e, '__annotator_block', block) @@ -379,7 +379,7 @@ class RPythonAnnotator(object): oldcells = [self.binding(a) for a in block.inputargs] try: unions = [annmodel.unionof(c1,c2) for c1, c2 in zip(oldcells,inputcells)] - except annmodel.UnionError, e: + except annmodel.UnionError as e: # Add source code to the UnionError e.source = '\n'.join(source_lines(graph, block, None, long=True)) raise diff --git a/rpython/annotator/classdesc.py b/rpython/annotator/classdesc.py index 52b574b3e7..ec2ed1dca4 100644 --- a/rpython/annotator/classdesc.py +++ b/rpython/annotator/classdesc.py @@ -579,6 +579,14 @@ class ClassDesc(Desc): if cls not in FORCE_ATTRIBUTES_INTO_CLASSES: self.all_enforced_attrs = [] # no attribute allowed + if (getattr(cls, '_must_be_light_finalizer_', False) and + hasattr(cls, '__del__') and + not getattr(cls.__del__, '_must_be_light_finalizer_', False)): + raise AnnotatorError( + "Class %r is in a class hierarchy with " + "_must_be_light_finalizer_ = True: it cannot have a " + "finalizer without @rgc.must_be_light_finalizer" % (cls,)) + def add_source_attribute(self, name, value, mixin=False): if isinstance(value, property): # special case for property object diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py index 632ee04f10..b8b528d24e 100644 --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -278,7 +278,7 @@ class FunctionDesc(Desc): defs_s.append(self.bookkeeper.immutablevalue(x)) try: inputcells = args.match_signature(signature, defs_s) - except ArgErr, e: + except ArgErr as e: raise AnnotatorError("signature mismatch: %s() %s" % (self.name, e.getmsg())) return inputcells diff --git a/rpython/annotator/test/test_annrpython.py b/rpython/annotator/test/test_annrpython.py index fd66f2a846..d646b52cd0 100644 --- a/rpython/annotator/test/test_annrpython.py +++ b/rpython/annotator/test/test_annrpython.py @@ -902,7 +902,7 @@ class TestAnnotateTestCase: def f(l): try: l[0] - except (KeyError, IndexError),e: + except (KeyError, IndexError) as e: return e return None @@ -4577,6 +4577,39 @@ class TestAnnotateTestCase: with py.test.raises(AnnotatorError): a.build_types(f, [float]) + def test_Ellipsis_not_rpython(self): + def f(): + return Ellipsis + a = self.RPythonAnnotator() + e = py.test.raises(Exception, a.build_types, f, []) + assert str(e.value) == "Don't know how to represent Ellipsis" + + def test_must_be_light_finalizer(self): + from rpython.rlib import rgc + @rgc.must_be_light_finalizer + class A(object): + pass + class B(A): + def __del__(self): + pass + class C(A): + @rgc.must_be_light_finalizer + def __del__(self): + pass + class D(object): + def __del__(self): + pass + def fb(): + B() + def fc(): + C() + def fd(): + D() + a = self.RPythonAnnotator() + a.build_types(fc, []) + a.build_types(fd, []) + py.test.raises(AnnotatorError, a.build_types, fb, []) + def g(n): return [0, 1, 2, n] diff --git a/rpython/bin/translatorshell.py 
b/rpython/bin/translatorshell.py index 7fb68668fd..510a77582a 100755 --- a/rpython/bin/translatorshell.py +++ b/rpython/bin/translatorshell.py @@ -61,7 +61,7 @@ def setup_readline(): if __name__ == '__main__': try: setup_readline() - except ImportError, err: + except ImportError as err: print "Disabling readline support (%s)" % err from rpython.translator.test import snippet from rpython.rtyper.rtyper import RPythonTyper diff --git a/rpython/conftest.py b/rpython/conftest.py index 544eabb894..e4670825c0 100644 --- a/rpython/conftest.py +++ b/rpython/conftest.py @@ -82,7 +82,13 @@ class LeakFinder: return if (not getattr(item.obj, 'dont_track_allocations', False) and leakfinder.TRACK_ALLOCATIONS): - item._pypytest_leaks = leakfinder.stop_tracking_allocations(False) + kwds = {} + try: + kwds['do_collection'] = item.track_allocations_collect + except AttributeError: + pass + item._pypytest_leaks = leakfinder.stop_tracking_allocations(False, + **kwds) else: # stop_tracking_allocations() already called item._pypytest_leaks = None diff --git a/rpython/doc/rpython.rst b/rpython/doc/rpython.rst index 71ca5475d0..4e1a5ae681 100644 --- a/rpython/doc/rpython.rst +++ b/rpython/doc/rpython.rst @@ -191,6 +191,12 @@ We are using ``__setitem__`` for slicing isn't supported. Additionally, using negative indices for slicing is still not support, even when using ``__getslice__``. + Note that the destructor ``__del__`` should only contain `simple + operations`__; for any kind of more complex destructor, consider + using instead ``rpython.rlib.rgc.FinalizerQueue``. + +.. __: garbage_collection.html + This layout makes the number of types to take care about quite limited. diff --git a/rpython/flowspace/model.py b/rpython/flowspace/model.py index c07632cc43..2b75cbb0f4 100644 --- a/rpython/flowspace/model.py +++ b/rpython/flowspace/model.py @@ -156,7 +156,7 @@ class Link(object): def show(self): from rpython.translator.tool.graphpage import try_show - try_show(self) + return try_show(self) view = show @@ -239,7 +239,7 @@ class Block(object): def show(self): from rpython.translator.tool.graphpage import try_show - try_show(self) + return try_show(self) def _slowly_get_graph(self): import gc @@ -677,7 +677,7 @@ def checkgraph(graph): assert len(allexitcases) == len(block.exits) vars_previous_blocks.update(vars) - except AssertionError, e: + except AssertionError as e: # hack for debug tools only #graph.show() # <== ENABLE THIS TO SEE THE BROKEN GRAPH if block and not hasattr(e, '__annotator_block'): diff --git a/rpython/jit/backend/arm/test/support.py b/rpython/jit/backend/arm/test/support.py index 9e34499cd4..60ecbfa48c 100644 --- a/rpython/jit/backend/arm/test/support.py +++ b/rpython/jit/backend/arm/test/support.py @@ -67,7 +67,7 @@ def gen_test_function(name, asm, args, kwargs=None, asm_ext=None): func(*args, **kwargs) try: f_name = name[:name.index('_')] - except ValueError, e: + except ValueError as e: f_name = name self.assert_equal('%s%s %s' % (f_name, asm_ext, asm)) return f diff --git a/rpython/jit/backend/detect_cpu.py b/rpython/jit/backend/detect_cpu.py index adab7c31e8..bc76649ff7 100644 --- a/rpython/jit/backend/detect_cpu.py +++ b/rpython/jit/backend/detect_cpu.py @@ -35,7 +35,7 @@ def detect_model_from_c_compiler(): if not getdefined(macro, ''): continue return k - raise ProcessorAutodetectError, "Cannot detect processor using compiler macros" + raise ProcessorAutodetectError("Cannot detect processor using compiler macros") def detect_model_from_host_platform(): @@ -52,7 +52,7 @@ def 
detect_model_from_host_platform(): # assume we have 'uname' mach = os.popen('uname -m', 'r').read().strip() if not mach: - raise ProcessorAutodetectError, "cannot run 'uname -m'" + raise ProcessorAutodetectError("cannot run 'uname -m'") # result ={'i386': MODEL_X86, 'i486': MODEL_X86, @@ -74,7 +74,7 @@ def detect_model_from_host_platform(): }.get(mach) if result is None: - raise ProcessorAutodetectError, "unknown machine name %s" % mach + raise ProcessorAutodetectError("unknown machine name %s" % mach) # if result.startswith('x86'): from rpython.jit.backend.x86 import detect_feature as feature @@ -128,7 +128,7 @@ def getcpuclassname(backend_name="auto"): elif backend_name == MODEL_S390_64: return "rpython.jit.backend.zarch.runner", "CPU_S390_64" else: - raise ProcessorAutodetectError, ( + raise ProcessorAutodetectError( "we have no JIT backend for this cpu: '%s'" % backend_name) def getcpuclass(backend_name="auto"): diff --git a/rpython/jit/backend/llgraph/runner.py b/rpython/jit/backend/llgraph/runner.py index ba042003ad..4e0c56d140 100644 --- a/rpython/jit/backend/llgraph/runner.py +++ b/rpython/jit/backend/llgraph/runner.py @@ -404,7 +404,7 @@ class LLGraphCPU(model.AbstractCPU): try: frame.execute(lltrace) assert False - except ExecutionFinished, e: + except ExecutionFinished as e: return e.deadframe def get_value_direct(self, deadframe, tp, index): @@ -479,6 +479,9 @@ class LLGraphCPU(model.AbstractCPU): all_descrs.append(v) return all_descrs + def fetch_all_descrs(self): + return self.descrs.values() + def calldescrof(self, FUNC, ARGS, RESULT, effect_info): key = ('call', getkind(RESULT), tuple([getkind(A) for A in ARGS]), @@ -1094,7 +1097,7 @@ class LLFrame(object): execute = getattr(self, 'execute_' + op.getopname()) try: resval = execute(_getdescr(op), *args) - except Jump, j: + except Jump as j: self.lltrace, i = j.jump_target if i >= 0: label_op = self.lltrace.operations[i] @@ -1345,7 +1348,7 @@ class LLFrame(object): try: res = self.cpu.maybe_on_top_of_llinterp(func, call_args, TP.RESULT) self.last_exception = None - except LLException, lle: + except LLException as lle: self.last_exception = lle res = _example_res[getkind(TP.RESULT)[0]] return res @@ -1441,7 +1444,7 @@ class LLFrame(object): assembler_helper_ptr = jd.assembler_helper_adr.ptr # fish try: result = assembler_helper_ptr(pframe, vable) - except LLException, lle: + except LLException as lle: assert self.last_exception is None, "exception left behind" self.last_exception = lle # fish op diff --git a/rpython/jit/backend/llsupport/llmodel.py b/rpython/jit/backend/llsupport/llmodel.py index a652ca9d0c..88825b2ce5 100644 --- a/rpython/jit/backend/llsupport/llmodel.py +++ b/rpython/jit/backend/llsupport/llmodel.py @@ -144,7 +144,7 @@ class AbstractLLCPU(AbstractCPU): # all other fields are empty llop.gc_writebarrier(lltype.Void, new_frame) return lltype.cast_opaque_ptr(llmemory.GCREF, new_frame) - except Exception, e: + except Exception as e: print "Unhandled exception", e, "in realloc_frame" return lltype.nullptr(llmemory.GCREF.TO) diff --git a/rpython/jit/backend/llsupport/test/zrpy_gc_test.py b/rpython/jit/backend/llsupport/test/zrpy_gc_test.py index 52532e38bf..8327196c27 100644 --- a/rpython/jit/backend/llsupport/test/zrpy_gc_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_gc_test.py @@ -176,7 +176,7 @@ class BaseFrameworkTests(object): cls.cbuilder = compile(get_entry(allfuncs), cls.gc, gcrootfinder=cls.gcrootfinder, jit=True, thread=True) - except ConfigError, e: + except ConfigError as e: assert 
str(e).startswith('invalid value asmgcc') py.test.skip('asmgcc not supported') finally: diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py index 7bf2a142b8..91d06f8163 100644 --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -34,7 +34,7 @@ class CompiledVmprofTest(CCompiledMixin): try: rvmprof.register_code_object_class(MyCode, get_name) - except rvmprof.VMProfPlatformUnsupported, e: + except rvmprof.VMProfPlatformUnsupported as e: py.test.skip(str(e)) def get_unique_id(code): diff --git a/rpython/jit/backend/llsupport/test/ztranslation_test.py b/rpython/jit/backend/llsupport/test/ztranslation_test.py index 304b1e393e..ad70b86b3e 100644 --- a/rpython/jit/backend/llsupport/test/ztranslation_test.py +++ b/rpython/jit/backend/llsupport/test/ztranslation_test.py @@ -288,7 +288,7 @@ class TranslationRemoveTypePtrTest(CCompiledMixin): def main(i): try: myportal(i) - except ImDone, e: + except ImDone as e: return e.resvalue # XXX custom fishing, depends on the exact env var and format @@ -297,7 +297,7 @@ class TranslationRemoveTypePtrTest(CCompiledMixin): try: res = self.meta_interp(main, [400]) assert res == main(400) - except ConfigError,e: + except ConfigError as e: assert str(e).startswith('invalid value asmgcc') py.test.skip('asmgcc not supported') finally: diff --git a/rpython/jit/backend/ppc/form.py b/rpython/jit/backend/ppc/form.py index 2f84ca1dda..7d6055db2d 100644 --- a/rpython/jit/backend/ppc/form.py +++ b/rpython/jit/backend/ppc/form.py @@ -48,7 +48,7 @@ class IBoundDesc(object): def __call__(self, *args, **kw): fieldvalues, sparefields = self.calc_fields(args, kw) if sparefields: - raise FormException, 'fields %s left'%sparefields + raise FormException('fields %s left'%sparefields) self.assembler.insts.append(Instruction(fieldvalues)) @@ -72,7 +72,7 @@ class IDesc(object): self.boundtype = boundtype for field in specializations: if field not in fields: - raise FormException, field + raise FormException(field) def __get__(self, ob, cls=None): if ob is None: return self @@ -91,14 +91,14 @@ class IDesc(object): for fname, v in more_specializatons.iteritems(): field = self.fieldmap[fname] if field not in self.fields: - raise FormException, "don't know about '%s' here" % field + raise FormException("don't know about '%s' here" % field) if isinstance(v, str): ds[field] = self.fieldmap[v] else: ms[field] = v s.update(ms) if len(s) != len(self.specializations) + len(ms): - raise FormException, "respecialization not currently allowed" + raise FormException("respecialization not currently allowed") if ds: fields = list(self.fields) for field in ds: @@ -175,8 +175,8 @@ class Form(object): overlap = True for b in range(field.left, field.right+1): if not overlap and b in bits: - raise FormException, "'%s' and '%s' clash at bit '%s'"%( - bits[b], fname, b) + raise FormException("'%s' and '%s' clash at bit '%s'"%( + bits[b], fname, b)) else: bits[b] = fname self.fields.append(field) @@ -186,7 +186,7 @@ class Form(object): for fname in specializations: field = self.fieldmap[fname] if field not in self.fields: - raise FormException, "no nothin bout '%s'"%fname + raise FormException("no nothin bout '%s'"%fname) s[field] = specializations[fname] return IDesc(self.fieldmap, self.fields, s) diff --git a/rpython/jit/codewriter/call.py b/rpython/jit/codewriter/call.py index dda8288a28..a48c8e1350 100644 --- a/rpython/jit/codewriter/call.py +++ 
b/rpython/jit/codewriter/call.py @@ -301,7 +301,8 @@ class CallControl(object): # assert effectinfo is not None if elidable or loopinvariant: - assert extraeffect != EffectInfo.EF_FORCES_VIRTUAL_OR_VIRTUALIZABLE + assert (effectinfo.extraeffect < + EffectInfo.EF_FORCES_VIRTUAL_OR_VIRTUALIZABLE) # XXX this should also say assert not can_invalidate, but # it can't because our analyzer is not good enough for now # (and getexecutioncontext() can't really invalidate) diff --git a/rpython/jit/codewriter/effectinfo.py b/rpython/jit/codewriter/effectinfo.py index 88c00eff09..75e9de7fba 100644 --- a/rpython/jit/codewriter/effectinfo.py +++ b/rpython/jit/codewriter/effectinfo.py @@ -1,7 +1,9 @@ +import sys from rpython.jit.metainterp.typesystem import deref, fieldType, arrayItem from rpython.rtyper.rclass import OBJECT from rpython.rtyper.lltypesystem import lltype, llmemory from rpython.translator.backendopt.graphanalyze import BoolGraphAnalyzer +from rpython.tool.algo import bitstring class EffectInfo(object): @@ -115,12 +117,20 @@ class EffectInfo(object): can_invalidate=False, call_release_gil_target=_NO_CALL_RELEASE_GIL_TARGET, extradescrs=None): - key = (frozenset_or_none(readonly_descrs_fields), - frozenset_or_none(readonly_descrs_arrays), - frozenset_or_none(readonly_descrs_interiorfields), - frozenset_or_none(write_descrs_fields), - frozenset_or_none(write_descrs_arrays), - frozenset_or_none(write_descrs_interiorfields), + readonly_descrs_fields = frozenset_or_none(readonly_descrs_fields) + readonly_descrs_arrays = frozenset_or_none(readonly_descrs_arrays) + readonly_descrs_interiorfields = frozenset_or_none( + readonly_descrs_interiorfields) + write_descrs_fields = frozenset_or_none(write_descrs_fields) + write_descrs_arrays = frozenset_or_none(write_descrs_arrays) + write_descrs_interiorfields = frozenset_or_none( + write_descrs_interiorfields) + key = (readonly_descrs_fields, + readonly_descrs_arrays, + readonly_descrs_interiorfields, + write_descrs_fields, + write_descrs_arrays, + write_descrs_interiorfields, extraeffect, oopspecindex, can_invalidate) @@ -144,22 +154,34 @@ class EffectInfo(object): assert write_descrs_arrays is not None assert write_descrs_interiorfields is not None result = object.__new__(cls) - result.readonly_descrs_fields = readonly_descrs_fields - result.readonly_descrs_arrays = readonly_descrs_arrays - result.readonly_descrs_interiorfields = readonly_descrs_interiorfields + # the frozensets "._readonly_xxx" and "._write_xxx" should not be + # translated. + result._readonly_descrs_fields = readonly_descrs_fields + result._readonly_descrs_arrays = readonly_descrs_arrays + result._readonly_descrs_interiorfields = readonly_descrs_interiorfields if extraeffect == EffectInfo.EF_LOOPINVARIANT or \ extraeffect == EffectInfo.EF_ELIDABLE_CANNOT_RAISE or \ extraeffect == EffectInfo.EF_ELIDABLE_OR_MEMORYERROR or \ extraeffect == EffectInfo.EF_ELIDABLE_CAN_RAISE: # Ignore the writes. Note that this ignores also writes with # no corresponding reads (rarely the case, but possible). 
- result.write_descrs_fields = [] - result.write_descrs_arrays = [] - result.write_descrs_interiorfields = [] + result._write_descrs_fields = frozenset() + result._write_descrs_arrays = frozenset() + result._write_descrs_interiorfields = frozenset() else: - result.write_descrs_fields = write_descrs_fields - result.write_descrs_arrays = write_descrs_arrays - result.write_descrs_interiorfields = write_descrs_interiorfields + result._write_descrs_fields = write_descrs_fields + result._write_descrs_arrays = write_descrs_arrays + result._write_descrs_interiorfields = write_descrs_interiorfields + # initialized later, in compute_bitstrings() + # (the goal of this is to make sure we don't build new EffectInfo + # instances after compute_bitstrings() is called) + result.bitstring_readonly_descrs_fields = Ellipsis + result.bitstring_readonly_descrs_arrays = Ellipsis + result.bitstring_readonly_descrs_interiorfields = Ellipsis + result.bitstring_write_descrs_fields = Ellipsis + result.bitstring_write_descrs_arrays = Ellipsis + result.bitstring_write_descrs_interiorfields = Ellipsis + # result.extraeffect = extraeffect result.can_invalidate = can_invalidate result.oopspecindex = oopspecindex @@ -167,9 +189,38 @@ class EffectInfo(object): result.call_release_gil_target = call_release_gil_target if result.check_can_raise(ignore_memoryerror=True): assert oopspecindex in cls._OS_CANRAISE + + if (result._write_descrs_arrays is not None and + len(result._write_descrs_arrays) == 1): + # this is used only for ARRAYCOPY operations + [result.single_write_descr_array] = result._write_descrs_arrays + else: + result.single_write_descr_array = None + cls._cache[key] = result return result + def check_readonly_descr_field(self, fielddescr): + return bitstring.bitcheck(self.bitstring_readonly_descrs_fields, + fielddescr.ei_index) + def check_write_descr_field(self, fielddescr): + return bitstring.bitcheck(self.bitstring_write_descrs_fields, + fielddescr.ei_index) + def check_readonly_descr_array(self, arraydescr): + return bitstring.bitcheck(self.bitstring_readonly_descrs_arrays, + arraydescr.ei_index) + def check_write_descr_array(self, arraydescr): + return bitstring.bitcheck(self.bitstring_write_descrs_arrays, + arraydescr.ei_index) + def check_readonly_descr_interiorfield(self, interiorfielddescr): + # NOTE: this is not used so far + return bitstring.bitcheck(self.bitstring_readonly_descrs_interiorfields, + interiorfielddescr.ei_index) + def check_write_descr_interiorfield(self, interiorfielddescr): + # NOTE: this is not used so far + return bitstring.bitcheck(self.bitstring_write_descrs_interiorfields, + interiorfielddescr.ei_index) + def check_can_raise(self, ignore_memoryerror=False): if ignore_memoryerror: return self.extraeffect > self.EF_ELIDABLE_OR_MEMORYERROR @@ -387,3 +438,88 @@ class CallInfoCollection(object): assert funcptr return funcptr funcptr_for_oopspec._annspecialcase_ = 'specialize:arg(1)' + +# ____________________________________________________________ + +def compute_bitstrings(all_descrs): + # Compute the bitstrings in the EffectInfo, + # bitstring_{readonly,write}_descrs_{fieldd,arrays,interiordescrs}, + # and for each FieldDescrs and ArrayDescrs compute 'ei_index'. + # Each bit in the bitstrings says whether this Descr is present in + # this EffectInfo or not. We try to share the value of 'ei_index' + # across multiple Descrs if they always give the same answer (in + # PyPy, it reduces the length of the bitstrings from 4000+ to + # 373). 
+ from rpython.jit.codewriter.policy import log + + log("compute_bitstrings:") + effectinfos = [] + descrs = {'fields': set(), 'arrays': set(), 'interiorfields': set()} + for descr in all_descrs: + if hasattr(descr, 'get_extra_info'): + ei = descr.get_extra_info() + if ei is None: + continue + if ei._readonly_descrs_fields is None: + for key in descrs: + assert getattr(ei, '_readonly_descrs_' + key) is None + assert getattr(ei, '_write_descrs_' + key) is None + setattr(ei, 'bitstring_readonly_descrs_' + key, None) + setattr(ei, 'bitstring_write_descrs_' + key, None) + else: + effectinfos.append(ei) + for key in descrs: + descrs[key].update(getattr(ei, '_readonly_descrs_' + key)) + descrs[key].update(getattr(ei, '_write_descrs_' + key)) + else: + descr.ei_index = sys.maxint + log(" %d effectinfos:" % (len(effectinfos),)) + for key in sorted(descrs): + log(" %d descrs for %s" % (len(descrs[key]), key)) + + seen = set() + for key in descrs: + all_sets = [] + for descr in descrs[key]: + eisetr = [ei for ei in effectinfos + if descr in getattr(ei, '_readonly_descrs_' + key)] + eisetw = [ei for ei in effectinfos + if descr in getattr(ei, '_write_descrs_' + key)] + # these are the set of all ei such that this descr is in + # ei._readonly_descrs or ei._write_descrs + eisetr = frozenset(eisetr) + eisetw = frozenset(eisetw) + all_sets.append((descr, eisetr, eisetw)) + + # heuristic to reduce the total size of the bitstrings: start with + # numbering the descrs that are seen in many EffectInfos. If instead, + # by lack of chance, such a descr had a high number, then all these + # EffectInfos' bitstrings would need to store the same high number. + def size_of_both_sets((d, r, w)): + return len(r) + len(w) + all_sets.sort(key=size_of_both_sets, reverse=True) + + mapping = {} + for (descr, eisetr, eisetw) in all_sets: + assert descr.ei_index == sys.maxint # not modified yet + descr.ei_index = mapping.setdefault((eisetr, eisetw), len(mapping)) + + for ei in effectinfos: + bitstrr = [descr.ei_index + for descr in getattr(ei, '_readonly_descrs_' + key)] + bitstrw = [descr.ei_index + for descr in getattr(ei, '_write_descrs_' + key)] + assert sys.maxint not in bitstrr + assert sys.maxint not in bitstrw + bitstrr = bitstring.make_bitstring(bitstrr) + bitstrw = bitstring.make_bitstring(bitstrw) + setattr(ei, 'bitstring_readonly_descrs_' + key, bitstrr) + setattr(ei, 'bitstring_write_descrs_' + key, bitstrw) + seen.add(bitstrr) + seen.add(bitstrw) + + if seen: + mean_length = float(sum(len(x) for x in seen)) / len(seen) + max_length = max(len(x) for x in seen) + log("-> %d bitstrings, mean length %.1f, max length %d" % ( + len(seen), mean_length, max_length)) diff --git a/rpython/jit/codewriter/jtransform.py b/rpython/jit/codewriter/jtransform.py index 41d4e75e75..fa29ef9723 100644 --- a/rpython/jit/codewriter/jtransform.py +++ b/rpython/jit/codewriter/jtransform.py @@ -760,7 +760,7 @@ class Transformer(object): return [SpaceOperation('-live-', [], None), SpaceOperation('getfield_vable_%s' % kind, [v_inst, descr], op.result)] - except VirtualizableArrayField, e: + except VirtualizableArrayField as e: # xxx hack hack hack vinfo = e.args[1] arrayindex = vinfo.array_field_counter[op.args[1].value] diff --git a/rpython/jit/codewriter/policy.py b/rpython/jit/codewriter/policy.py index 374aeee4d2..3d79ca24e3 100644 --- a/rpython/jit/codewriter/policy.py +++ b/rpython/jit/codewriter/policy.py @@ -103,7 +103,7 @@ def contains_unsupported_variable_type(graph, supports_floats, getkind(v.concretetype, supports_floats, 
supports_longlong, supports_singlefloats) - except NotImplementedError, e: + except NotImplementedError as e: log.WARNING('%s, ignoring graph' % (e,)) log.WARNING(' %s' % (graph,)) return True diff --git a/rpython/jit/codewriter/test/test_effectinfo.py b/rpython/jit/codewriter/test/test_effectinfo.py index fbc0b9cfe1..81f5f54483 100644 --- a/rpython/jit/codewriter/test/test_effectinfo.py +++ b/rpython/jit/codewriter/test/test_effectinfo.py @@ -1,11 +1,12 @@ -import pytest +import pytest, sys from rpython.jit.codewriter.effectinfo import (effectinfo_from_writeanalyze, - EffectInfo, VirtualizableAnalyzer) + EffectInfo, VirtualizableAnalyzer, compute_bitstrings) from rpython.rlib import jit from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.rclass import OBJECT from rpython.translator.translator import TranslationContext, graphof +from rpython.tool.algo.bitstring import bitcheck class FakeCPU(object): @@ -29,37 +30,39 @@ def test_include_read_field(): S = lltype.GcStruct("S", ("a", lltype.Signed)) effects = frozenset([("readstruct", lltype.Ptr(S), "a")]) effectinfo = effectinfo_from_writeanalyze(effects, FakeCPU()) - assert list(effectinfo.readonly_descrs_fields) == [('fielddescr', S, "a")] - assert not effectinfo.write_descrs_fields - assert not effectinfo.write_descrs_arrays + assert list(effectinfo._readonly_descrs_fields) == [('fielddescr', S, "a")] + assert not effectinfo._write_descrs_fields + assert not effectinfo._write_descrs_arrays + assert effectinfo.single_write_descr_array is None def test_include_write_field(): S = lltype.GcStruct("S", ("a", lltype.Signed)) effects = frozenset([("struct", lltype.Ptr(S), "a")]) effectinfo = effectinfo_from_writeanalyze(effects, FakeCPU()) - assert list(effectinfo.write_descrs_fields) == [('fielddescr', S, "a")] - assert not effectinfo.readonly_descrs_fields - assert not effectinfo.write_descrs_arrays + assert list(effectinfo._write_descrs_fields) == [('fielddescr', S, "a")] + assert not effectinfo._readonly_descrs_fields + assert not effectinfo._write_descrs_arrays def test_include_read_array(): A = lltype.GcArray(lltype.Signed) effects = frozenset([("readarray", lltype.Ptr(A))]) effectinfo = effectinfo_from_writeanalyze(effects, FakeCPU()) - assert not effectinfo.readonly_descrs_fields - assert list(effectinfo.readonly_descrs_arrays) == [('arraydescr', A)] - assert not effectinfo.write_descrs_fields - assert not effectinfo.write_descrs_arrays + assert not effectinfo._readonly_descrs_fields + assert list(effectinfo._readonly_descrs_arrays) == [('arraydescr', A)] + assert not effectinfo._write_descrs_fields + assert not effectinfo._write_descrs_arrays def test_include_write_array(): A = lltype.GcArray(lltype.Signed) effects = frozenset([("array", lltype.Ptr(A))]) effectinfo = effectinfo_from_writeanalyze(effects, FakeCPU()) - assert not effectinfo.readonly_descrs_fields - assert not effectinfo.write_descrs_fields - assert list(effectinfo.write_descrs_arrays) == [('arraydescr', A)] + assert not effectinfo._readonly_descrs_fields + assert not effectinfo._write_descrs_fields + assert list(effectinfo._write_descrs_arrays) == [('arraydescr', A)] + assert effectinfo.single_write_descr_array == ('arraydescr', A) def test_dont_include_read_and_write_field(): @@ -67,9 +70,9 @@ def test_dont_include_read_and_write_field(): effects = frozenset([("readstruct", lltype.Ptr(S), "a"), ("struct", lltype.Ptr(S), "a")]) effectinfo = effectinfo_from_writeanalyze(effects, FakeCPU()) - assert not effectinfo.readonly_descrs_fields - assert 
list(effectinfo.write_descrs_fields) == [('fielddescr', S, "a")] - assert not effectinfo.write_descrs_arrays + assert not effectinfo._readonly_descrs_fields + assert list(effectinfo._write_descrs_fields) == [('fielddescr', S, "a")] + assert not effectinfo._write_descrs_arrays def test_dont_include_read_and_write_array(): @@ -77,34 +80,34 @@ def test_dont_include_read_and_write_array(): effects = frozenset([("readarray", lltype.Ptr(A)), ("array", lltype.Ptr(A))]) effectinfo = effectinfo_from_writeanalyze(effects, FakeCPU()) - assert not effectinfo.readonly_descrs_fields - assert not effectinfo.readonly_descrs_arrays - assert not effectinfo.write_descrs_fields - assert list(effectinfo.write_descrs_arrays) == [('arraydescr', A)] + assert not effectinfo._readonly_descrs_fields + assert not effectinfo._readonly_descrs_arrays + assert not effectinfo._write_descrs_fields + assert list(effectinfo._write_descrs_arrays) == [('arraydescr', A)] def test_filter_out_typeptr(): effects = frozenset([("struct", lltype.Ptr(OBJECT), "typeptr")]) effectinfo = effectinfo_from_writeanalyze(effects, None) - assert not effectinfo.readonly_descrs_fields - assert not effectinfo.write_descrs_fields - assert not effectinfo.write_descrs_arrays + assert not effectinfo._readonly_descrs_fields + assert not effectinfo._write_descrs_fields + assert not effectinfo._write_descrs_arrays def test_filter_out_array_of_void(): effects = frozenset([("array", lltype.Ptr(lltype.GcArray(lltype.Void)))]) effectinfo = effectinfo_from_writeanalyze(effects, None) - assert not effectinfo.readonly_descrs_fields - assert not effectinfo.write_descrs_fields - assert not effectinfo.write_descrs_arrays + assert not effectinfo._readonly_descrs_fields + assert not effectinfo._write_descrs_fields + assert not effectinfo._write_descrs_arrays def test_filter_out_struct_with_void(): effects = frozenset([("struct", lltype.Ptr(lltype.GcStruct("x", ("a", lltype.Void))), "a")]) effectinfo = effectinfo_from_writeanalyze(effects, None) - assert not effectinfo.readonly_descrs_fields - assert not effectinfo.write_descrs_fields - assert not effectinfo.write_descrs_arrays + assert not effectinfo._readonly_descrs_fields + assert not effectinfo._write_descrs_fields + assert not effectinfo._write_descrs_arrays class TestVirtualizableAnalyzer(object): @@ -138,3 +141,64 @@ class TestVirtualizableAnalyzer(object): res = self.analyze(entry, [int]) assert not res + + +def test_compute_bitstrings(): + class FDescr: + pass + class ADescr: + pass + class CDescr: + def __init__(self, ei): + self._ei = ei + def get_extra_info(self): + return self._ei + + f1descr = FDescr() + f2descr = FDescr() + f3descr = FDescr() + a1descr = ADescr() + a2descr = ADescr() + + ei1 = EffectInfo(None, None, None, None, None, None, + EffectInfo.EF_RANDOM_EFFECTS) + ei2 = EffectInfo([f1descr], [], [], [], [], []) + ei3 = EffectInfo([f1descr], [a1descr, a2descr], [], [f2descr], [], []) + + compute_bitstrings([CDescr(ei1), CDescr(ei2), CDescr(ei3), + f1descr, f2descr, f3descr, a1descr, a2descr]) + + assert f1descr.ei_index in (0, 1) + assert f2descr.ei_index == 1 - f1descr.ei_index + assert f3descr.ei_index == sys.maxint + assert a1descr.ei_index == 0 + assert a2descr.ei_index == 0 + + assert ei1.bitstring_readonly_descrs_fields is None + assert ei1.bitstring_readonly_descrs_arrays is None + assert ei1.bitstring_write_descrs_fields is None + + def expand(bitstr): + return [n for n in range(10) if bitcheck(bitstr, n)] + + assert expand(ei2.bitstring_readonly_descrs_fields) == [f1descr.ei_index] + 
assert expand(ei2.bitstring_write_descrs_fields) == [] + assert expand(ei2.bitstring_readonly_descrs_arrays) == [] + assert expand(ei2.bitstring_write_descrs_arrays) == [] + + assert expand(ei3.bitstring_readonly_descrs_fields) == [f1descr.ei_index] + assert expand(ei3.bitstring_write_descrs_fields) == [f2descr.ei_index] + assert expand(ei3.bitstring_readonly_descrs_arrays) == [0] #a1descr,a2descr + assert expand(ei3.bitstring_write_descrs_arrays) == [] + + for ei in [ei2, ei3]: + for fdescr in [f1descr, f2descr]: + assert ei.check_readonly_descr_field(fdescr) == ( + fdescr in ei._readonly_descrs_fields) + assert ei.check_write_descr_field(fdescr) == ( + fdescr in ei._write_descrs_fields) + for adescr in [a1descr, a2descr]: + assert ei.check_readonly_descr_array(adescr) == ( + adescr in ei._readonly_descrs_arrays) + assert ei.check_write_descr_array(adescr) == ( + adescr in ei._write_descrs_arrays) diff --git a/rpython/jit/codewriter/test/test_flatten.py b/rpython/jit/codewriter/test/test_flatten.py index f98fc3b2e1..b91d21d3fb 100644 --- a/rpython/jit/codewriter/test/test_flatten.py +++ b/rpython/jit/codewriter/test/test_flatten.py @@ -374,7 +374,7 @@ class TestFlatten: def f(i): try: g(i) - except FooError, e: + except FooError as e: return e.num except Exception: return 3 diff --git a/rpython/jit/codewriter/test/test_jtransform.py b/rpython/jit/codewriter/test/test_jtransform.py index e7dd0ec5c5..f9246615d1 100644 --- a/rpython/jit/codewriter/test/test_jtransform.py +++ b/rpython/jit/codewriter/test/test_jtransform.py @@ -1412,7 +1412,7 @@ def test_unknown_operation(): tr = Transformer() try: tr.rewrite_operation(op) - except Exception, e: + except Exception as e: assert 'foobar' in str(e) def test_likely_unlikely(): diff --git a/rpython/jit/codewriter/test/test_regalloc.py b/rpython/jit/codewriter/test/test_regalloc.py index 7d2ac3a265..958f2ce7d8 100644 --- a/rpython/jit/codewriter/test/test_regalloc.py +++ b/rpython/jit/codewriter/test/test_regalloc.py @@ -272,7 +272,7 @@ class TestRegAlloc: kref2 = bar(kref) try: return g(n) - except FooError, e: + except FooError as e: if foo(e): return kref else: diff --git a/rpython/jit/metainterp/blackhole.py b/rpython/jit/metainterp/blackhole.py index e2f187a884..fcf0c20e9c 100644 --- a/rpython/jit/metainterp/blackhole.py +++ b/rpython/jit/metainterp/blackhole.py @@ -172,7 +172,7 @@ class BlackholeInterpBuilder(object): # call the method bhimpl_xxx() try: result = unboundmethod(*args) - except Exception, e: + except Exception as e: if verbose and not we_are_translated(): print '-> %s!' % (e.__class__.__name__,) if resulttype == 'i' or resulttype == 'r' or resulttype == 'f': @@ -323,7 +323,7 @@ class BlackholeInterpreter(object): break except jitexc.JitException: raise # go through - except Exception, e: + except Exception as e: lle = get_llexception(self.cpu, e) self.handle_exception_in_frame(lle) @@ -1527,9 +1527,9 @@ class BlackholeInterpreter(object): # we now proceed to interpret the bytecode in this frame self.run() # - except jitexc.JitException, e: + except jitexc.JitException as e: raise # go through - except Exception, e: + except Exception as e: # if we get an exception, return it to the caller frame current_exc = get_llexception(self.cpu, e) if not self.nextblackholeinterp: @@ -1660,7 +1660,7 @@ def _handle_jitexception(blackholeinterp, exc): # We have reached a recursive portal level. 
try: blackholeinterp._handle_jitexception_in_portal(exc) - except Exception, e: + except Exception as e: # It raised a general exception (it should not be a JitException here). lle = get_llexception(blackholeinterp.cpu, e) else: diff --git a/rpython/jit/metainterp/executor.py b/rpython/jit/metainterp/executor.py index dd0c7616ac..80a3d60740 100644 --- a/rpython/jit/metainterp/executor.py +++ b/rpython/jit/metainterp/executor.py @@ -51,28 +51,28 @@ def _do_call(cpu, metainterp, argboxes, descr, rettype): if rettype == INT: try: result = cpu.bh_call_i(func, args_i, args_r, args_f, descr) - except Exception, e: + except Exception as e: metainterp.execute_raised(e) result = 0 return result if rettype == REF: try: result = cpu.bh_call_r(func, args_i, args_r, args_f, descr) - except Exception, e: + except Exception as e: metainterp.execute_raised(e) result = NULL return result if rettype == FLOAT: try: result = cpu.bh_call_f(func, args_i, args_r, args_f, descr) - except Exception, e: + except Exception as e: metainterp.execute_raised(e) result = longlong.ZEROF return result if rettype == VOID: try: cpu.bh_call_v(func, args_i, args_r, args_f, descr) - except Exception, e: + except Exception as e: metainterp.execute_raised(e) return None raise AssertionError("bad rettype") diff --git a/rpython/jit/metainterp/heapcache.py b/rpython/jit/metainterp/heapcache.py index 4c8dd21481..ad4b89070c 100644 --- a/rpython/jit/metainterp/heapcache.py +++ b/rpython/jit/metainterp/heapcache.py @@ -209,7 +209,7 @@ class HeapCache(object): isinstance(argboxes[3], ConstInt) and isinstance(argboxes[4], ConstInt) and isinstance(argboxes[5], ConstInt) and - len(descr.get_extra_info().write_descrs_arrays) == 1): + descr.get_extra_info().single_write_descr_array is not None): # ARRAYCOPY with constant starts and constant length doesn't escape # its argument # XXX really? 
@@ -299,9 +299,9 @@ class HeapCache(object): isinstance(argboxes[3], ConstInt) and isinstance(argboxes[4], ConstInt) and isinstance(argboxes[5], ConstInt) and - len(effectinfo.write_descrs_arrays) == 1 + effectinfo.single_write_descr_array is not None ): - descr = effectinfo.write_descrs_arrays[0] + descr = effectinfo.single_write_descr_array cache = self.heap_array_cache.get(descr, None) srcstart = argboxes[3].getint() dststart = argboxes[4].getint() @@ -328,10 +328,10 @@ class HeapCache(object): idx_cache._clear_cache_on_write(seen_allocation_of_target) return elif ( - len(effectinfo.write_descrs_arrays) == 1 + effectinfo.single_write_descr_array is not None ): # Fish the descr out of the effectinfo - cache = self.heap_array_cache.get(effectinfo.write_descrs_arrays[0], None) + cache = self.heap_array_cache.get(effectinfo.single_write_descr_array, None) if cache is not None: for idx, cache in cache.iteritems(): cache._clear_cache_on_write(seen_allocation_of_target) diff --git a/rpython/jit/metainterp/history.py b/rpython/jit/metainterp/history.py index c0bfb45a35..042f50553f 100644 --- a/rpython/jit/metainterp/history.py +++ b/rpython/jit/metainterp/history.py @@ -1,3 +1,4 @@ +import sys from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rtyper.lltypesystem import lltype, llmemory, rffi from rpython.rlib.objectmodel import we_are_translated, Symbolic @@ -87,9 +88,10 @@ def repr_rpython(box, typechars): class AbstractDescr(AbstractValue): - __slots__ = ('descr_index',) + __slots__ = ('descr_index', 'ei_index') llopaque = True descr_index = -1 + ei_index = sys.maxint def repr_of_descr(self): return '%r' % (self,) diff --git a/rpython/jit/metainterp/optimizeopt/heap.py b/rpython/jit/metainterp/optimizeopt/heap.py index 57c42829cf..201e6ded0a 100644 --- a/rpython/jit/metainterp/optimizeopt/heap.py +++ b/rpython/jit/metainterp/optimizeopt/heap.py @@ -432,28 +432,35 @@ class OptHeap(Optimization): optimize_GUARD_EXCEPTION = optimize_GUARD_NO_EXCEPTION def force_from_effectinfo(self, effectinfo): - # XXX we can get the wrong complexity here, if the lists - # XXX stored on effectinfo are large - for fielddescr in effectinfo.readonly_descrs_fields: - self.force_lazy_set(fielddescr) - for arraydescr in effectinfo.readonly_descrs_arrays: - self.force_lazy_setarrayitem(arraydescr) - for fielddescr in effectinfo.write_descrs_fields: - if fielddescr.is_always_pure(): - continue - try: - del self.cached_dict_reads[fielddescr] - except KeyError: - pass - self.force_lazy_set(fielddescr, can_cache=False) - for arraydescr in effectinfo.write_descrs_arrays: - self.force_lazy_setarrayitem(arraydescr, can_cache=False) - if arraydescr in self.corresponding_array_descrs: - dictdescr = self.corresponding_array_descrs.pop(arraydescr) + # Note: this version of the code handles effectively + # effectinfos that store arbitrarily many descrs, by looping + # on self.cached_{fields, arrayitems} and looking them up in + # the bitstrings stored in the effectinfo. 
+ for fielddescr, cf in self.cached_fields.items(): + if effectinfo.check_readonly_descr_field(fielddescr): + cf.force_lazy_set(self, fielddescr) + if effectinfo.check_write_descr_field(fielddescr): + if fielddescr.is_always_pure(): + continue + try: + del self.cached_dict_reads[fielddescr] + except KeyError: + pass + cf.force_lazy_set(self, fielddescr, can_cache=False) + # + for arraydescr, submap in self.cached_arrayitems.items(): + if effectinfo.check_readonly_descr_array(arraydescr): + self.force_lazy_setarrayitem_submap(submap) + if effectinfo.check_write_descr_array(arraydescr): + self.force_lazy_setarrayitem_submap(submap, can_cache=False) + # + for arraydescr, dictdescr in self.corresponding_array_descrs.items(): + if effectinfo.check_write_descr_array(arraydescr): try: del self.cached_dict_reads[dictdescr] except KeyError: pass # someone did it already + # if effectinfo.check_forces_virtual_or_virtualizable(): vrefinfo = self.optimizer.metainterp_sd.virtualref_info self.force_lazy_set(vrefinfo.descr_forced) @@ -476,6 +483,10 @@ class OptHeap(Optimization): if indexb is None or indexb.contains(idx): cf.force_lazy_set(self, None, can_cache) + def force_lazy_setarrayitem_submap(self, submap, can_cache=True): + for cf in submap.itervalues(): + cf.force_lazy_set(self, None, can_cache) + def force_all_lazy_sets(self): items = self.cached_fields.items() if not we_are_translated(): diff --git a/rpython/jit/metainterp/optimizeopt/intutils.py b/rpython/jit/metainterp/optimizeopt/intutils.py index 688627f52f..0fae33379a 100644 --- a/rpython/jit/metainterp/optimizeopt/intutils.py +++ b/rpython/jit/metainterp/optimizeopt/intutils.py @@ -1,5 +1,8 @@ +import sys from rpython.rlib.rarithmetic import ovfcheck, LONG_BIT, maxint, is_valid_int from rpython.rlib.objectmodel import we_are_translated +from rpython.rtyper.lltypesystem import lltype +from rpython.rtyper.lltypesystem.lloperation import llop from rpython.jit.metainterp.resoperation import rop, ResOperation from rpython.jit.metainterp.optimizeopt.info import AbstractInfo, INFO_NONNULL,\ INFO_UNKNOWN, INFO_NULL diff --git a/rpython/jit/metainterp/optimizeopt/rewrite.py b/rpython/jit/metainterp/optimizeopt/rewrite.py index b26d8ffe48..5b210ce0c7 100644 --- a/rpython/jit/metainterp/optimizeopt/rewrite.py +++ b/rpython/jit/metainterp/optimizeopt/rewrite.py @@ -620,10 +620,10 @@ class OptRewrite(Optimization): and length and ((dest_info and dest_info.is_virtual()) or length.getint() <= 8) and ((source_info and source_info.is_virtual()) or length.getint() <= 8) - and len(extrainfo.write_descrs_arrays) == 1): # <-sanity check + and extrainfo.single_write_descr_array is not None): #<-sanity check source_start = source_start_box.getint() dest_start = dest_start_box.getint() - arraydescr = extrainfo.write_descrs_arrays[0] + arraydescr = extrainfo.single_write_descr_array if arraydescr.is_array_of_structs(): return False # not supported right now diff --git a/rpython/jit/metainterp/optimizeopt/test/test_intbound.py b/rpython/jit/metainterp/optimizeopt/test/test_intbound.py index f974d67264..0dedd21c7a 100644 --- a/rpython/jit/metainterp/optimizeopt/test/test_intbound.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_intbound.py @@ -240,6 +240,8 @@ def test_shift_overflow(): def test_div_bound(): + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop for _, _, b1 in some_bounds(): for _, _, b2 in some_bounds(): b3 = b1.py_div_bound(b2) diff --git 
a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py index b05c86ea15..36c2b5bbbd 100644 --- a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py @@ -5536,6 +5536,28 @@ class OptimizeOptTest(BaseTestWithUnroll): """ self.optimize_loop(ops, expected) + def test_division_bound_bug(self): + py.test.skip("XXX re-enable") + ops = """ + [i4] + i1 = int_ge(i4, -50) + guard_true(i1) [] + i2 = int_le(i4, -40) + guard_true(i2) [] + # here, -50 <= i4 <= -40 + + i5 = int_floordiv(i4, 30) + # here, we know that that i5 == -1 (C-style handling of negatives!) + escape_n(i5) + jump(i4) + """ + expected = """ + [i4, i5] + escape_n(-1) + jump(i4, -1) + """ + self.optimize_loop(ops, expected) + def test_subsub_ovf(self): ops = """ [i0] diff --git a/rpython/jit/metainterp/optimizeopt/test/test_util.py b/rpython/jit/metainterp/optimizeopt/test/test_util.py index 9bfa2e420b..e5e8aec487 100644 --- a/rpython/jit/metainterp/optimizeopt/test/test_util.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_util.py @@ -10,7 +10,7 @@ from rpython.jit.backend.llgraph import runner from rpython.jit.metainterp.history import (TreeLoop, AbstractDescr, JitCellToken, TargetToken) from rpython.jit.metainterp.optimizeopt.util import sort_descrs, equaloplists -from rpython.jit.codewriter.effectinfo import EffectInfo +from rpython.jit.codewriter.effectinfo import EffectInfo, compute_bitstrings from rpython.jit.metainterp.logger import LogOperations from rpython.jit.tool.oparser import OpParser, pure_parse, convert_loop_to_trace from rpython.jit.metainterp.quasiimmut import QuasiImmutDescr @@ -530,6 +530,7 @@ class BaseTest(object): metainterp_sd.virtualref_info = self.vrefinfo if hasattr(self, 'callinfocollection'): metainterp_sd.callinfocollection = self.callinfocollection + compute_bitstrings(self.cpu.fetch_all_descrs()) # compile_data.enable_opts = self.enable_opts state = optimize_trace(metainterp_sd, None, compile_data) diff --git a/rpython/jit/metainterp/optimizeopt/test/test_zdisable_opts.py b/rpython/jit/metainterp/optimizeopt/test/test_zdisable_opts.py index 890cafd75d..43f372af3e 100644 --- a/rpython/jit/metainterp/optimizeopt/test/test_zdisable_opts.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_zdisable_opts.py @@ -39,7 +39,7 @@ for optnum in range(len(allopts)): def raises(self, e, fn, *args): try: fn(*args) - except Exception, e: + except Exception as e: return e opt = allopts[optnum] diff --git a/rpython/jit/metainterp/optimizeopt/virtualstate.py b/rpython/jit/metainterp/optimizeopt/virtualstate.py index 4088916ad1..55ae54dcae 100644 --- a/rpython/jit/metainterp/optimizeopt/virtualstate.py +++ b/rpython/jit/metainterp/optimizeopt/virtualstate.py @@ -91,7 +91,7 @@ class AbstractVirtualStateInfo(object): state.renum[self.position] = other.position try: self._generate_guards(other, op, runtime_op, state) - except VirtualStatesCantMatch, e: + except VirtualStatesCantMatch as e: state.bad[self] = state.bad[other] = None if e.state is None: e.state = state diff --git a/rpython/jit/metainterp/pyjitpl.py b/rpython/jit/metainterp/pyjitpl.py index bc5f04999a..68489c72d5 100644 --- a/rpython/jit/metainterp/pyjitpl.py +++ b/rpython/jit/metainterp/pyjitpl.py @@ -1837,7 +1837,11 @@ class MetaInterpStaticData(object): self.cpu.propagate_exception_descr = exc_descr # self.globaldata = MetaInterpGlobalData(self) + + def finish_setup_descrs(self): + from rpython.jit.codewriter 
import effectinfo self.all_descrs = self.cpu.setup_descrs() + effectinfo.compute_bitstrings(self.all_descrs) def _setup_once(self): """Runtime setup needed by the various components of the JIT.""" @@ -2029,7 +2033,7 @@ class MetaInterp(object): else: try: self.compile_done_with_this_frame(resultbox) - except SwitchToBlackhole, stb: + except SwitchToBlackhole as stb: self.aborted_tracing(stb.reason) sd = self.staticdata result_type = self.jitdriver_sd.result_type @@ -2062,7 +2066,7 @@ class MetaInterp(object): self.popframe() try: self.compile_exit_frame_with_exception(self.last_exc_box) - except SwitchToBlackhole, stb: + except SwitchToBlackhole as stb: self.aborted_tracing(stb.reason) raise jitexc.ExitFrameWithExceptionRef(self.cpu, lltype.cast_opaque_ptr(llmemory.GCREF, excvalue)) @@ -2095,7 +2099,7 @@ class MetaInterp(object): guard_op = self.history.record(opnum, moreargs, lltype.nullptr(llmemory.GCREF.TO)) else: - guard_op = self.history.record(opnum, moreargs, None) + guard_op = self.history.record(opnum, moreargs, None) self.capture_resumedata(resumepc) # ^^^ records extra to history self.staticdata.profiler.count_ops(opnum, Counters.GUARDS) @@ -2249,7 +2253,7 @@ class MetaInterp(object): def execute_raised(self, exception, constant=False): if isinstance(exception, jitexc.JitException): - raise jitexc.JitException, exception # go through + raise exception # go through llexception = jitexc.get_llexception(self.cpu, exception) self.execute_ll_raised(llexception, constant) @@ -2362,7 +2366,7 @@ class MetaInterp(object): self.seen_loop_header_for_jdindex = -1 try: self.interpret() - except SwitchToBlackhole, stb: + except SwitchToBlackhole as stb: self.run_blackhole_interp_to_cancel_tracing(stb) assert False, "should always raise" @@ -2399,7 +2403,7 @@ class MetaInterp(object): if self.resumekey_original_loop_token is None: # very rare case raise SwitchToBlackhole(Counters.ABORT_BRIDGE) self.interpret() - except SwitchToBlackhole, stb: + except SwitchToBlackhole as stb: self.run_blackhole_interp_to_cancel_tracing(stb) assert False, "should always raise" @@ -3271,7 +3275,7 @@ def _get_opimpl_method(name, argcodes): print '\tpyjitpl: %s(%s)' % (name, ', '.join(map(repr, args))), try: resultbox = unboundmethod(self, *args) - except Exception, e: + except Exception as e: if self.debug: print '-> %s!' 
% e.__class__.__name__ raise diff --git a/rpython/jit/metainterp/test/support.py b/rpython/jit/metainterp/test/support.py index 5b140eac23..cf2c7b924c 100644 --- a/rpython/jit/metainterp/test/support.py +++ b/rpython/jit/metainterp/test/support.py @@ -132,6 +132,7 @@ def _run_with_pyjitpl(testself, args, stats): metainterp_sd = pyjitpl.MetaInterpStaticData(cw.cpu, opt) stats.metainterp_sd = metainterp_sd metainterp_sd.finish_setup(cw) + metainterp_sd.finish_setup_descrs() [jitdriver_sd] = metainterp_sd.jitdrivers_sd metainterp = pyjitpl.MetaInterp(metainterp_sd, jitdriver_sd) diff --git a/rpython/jit/metainterp/test/test_blackhole.py b/rpython/jit/metainterp/test/test_blackhole.py index c0b23fead7..5b2e0cd7e9 100644 --- a/rpython/jit/metainterp/test/test_blackhole.py +++ b/rpython/jit/metainterp/test/test_blackhole.py @@ -205,7 +205,7 @@ class TestBlackhole(LLJitMixin): myjitdriver.jit_merge_point(x=x, y=y) try: choices(x) - except FooError, e: + except FooError as e: if e.num == 0: break y += e.num diff --git a/rpython/jit/metainterp/test/test_compile.py b/rpython/jit/metainterp/test/test_compile.py index 9afc16408d..3d2e85b969 100644 --- a/rpython/jit/metainterp/test/test_compile.py +++ b/rpython/jit/metainterp/test/test_compile.py @@ -164,7 +164,7 @@ def test_compile_tmp_callback(): fail_descr = cpu.get_latest_descr(deadframe) try: fail_descr.handle_fail(deadframe, FakeMetaInterpSD(), None) - except jitexc.ExitFrameWithExceptionRef, e: + except jitexc.ExitFrameWithExceptionRef as e: assert lltype.cast_opaque_ptr(lltype.Ptr(EXC), e.value) == llexc else: assert 0, "should have raised" diff --git a/rpython/jit/metainterp/test/test_exception.py b/rpython/jit/metainterp/test/test_exception.py index c711e9bfc7..30ad14d3fd 100644 --- a/rpython/jit/metainterp/test/test_exception.py +++ b/rpython/jit/metainterp/test/test_exception.py @@ -17,7 +17,7 @@ class ExceptionTests: def f(n): try: return g(n) - except MyError, e: + except MyError as e: return e.n + 10 res = self.interp_operations(f, [9]) assert res == 8 @@ -141,7 +141,7 @@ class ExceptionTests: try: b(n) return 0 - except MyError, e: + except MyError as e: return e.n def f(n): return a(n) @@ -161,7 +161,7 @@ class ExceptionTests: myjitdriver.jit_merge_point(n=n) try: check(n, 0) - except MyError, e: + except MyError as e: n = check(e.n, 1) return n assert f(53) == -2 @@ -290,7 +290,7 @@ class ExceptionTests: myjitdriver.can_enter_jit(n=n) myjitdriver.jit_merge_point(n=n) n = n - check(n) - except MyError, e: + except MyError as e: return e.n assert f(53) == -2 res = self.meta_interp(f, [53], policy=StopAtXPolicy(check)) @@ -517,7 +517,7 @@ class ExceptionTests: def f(n): try: portal(n) - except SomeException, e: + except SomeException as e: return 3 return 2 @@ -536,7 +536,7 @@ class ExceptionTests: def main(n): try: f(n) - except MyError, e: + except MyError as e: return e.n res = self.meta_interp(main, [41], repeat=7) @@ -572,7 +572,7 @@ class ExceptionTests: try: f(n) return 3 - except MyError, e: + except MyError as e: return e.n except ValueError: return 8 @@ -590,7 +590,7 @@ class ExceptionTests: def f(x): try: return g(x) - except Exception, e: + except Exception as e: if isinstance(e, OverflowError): return -42 raise diff --git a/rpython/jit/metainterp/test/test_heapcache.py b/rpython/jit/metainterp/test/test_heapcache.py index 619504ff37..b8593723af 100644 --- a/rpython/jit/metainterp/test/test_heapcache.py +++ b/rpython/jit/metainterp/test/test_heapcache.py @@ -27,8 +27,12 @@ class FakeEffectinfo(object): def __init__(self, 
extraeffect, oopspecindex, write_descrs_fields, write_descrs_arrays): self.extraeffect = extraeffect self.oopspecindex = oopspecindex - self.write_descrs_fields = write_descrs_fields - self.write_descrs_arrays = write_descrs_arrays + self._write_descrs_fields = write_descrs_fields + self._write_descrs_arrays = write_descrs_arrays + if len(write_descrs_arrays) == 1: + [self.single_write_descr_array] = write_descrs_arrays + else: + self.single_write_descr_array = None def has_random_effects(self): return self.extraeffect == self.EF_RANDOM_EFFECTS @@ -37,14 +41,14 @@ class FakeCallDescr(object): def __init__(self, extraeffect, oopspecindex=None, write_descrs_fields=[], write_descrs_arrays=[]): self.extraeffect = extraeffect self.oopspecindex = oopspecindex - self.write_descrs_fields = write_descrs_fields - self.write_descrs_arrays = write_descrs_arrays + self.__write_descrs_fields = write_descrs_fields + self.__write_descrs_arrays = write_descrs_arrays def get_extra_info(self): return FakeEffectinfo( self.extraeffect, self.oopspecindex, - write_descrs_fields=self.write_descrs_fields, - write_descrs_arrays=self.write_descrs_arrays, + write_descrs_fields=self.__write_descrs_fields, + write_descrs_arrays=self.__write_descrs_arrays, ) arraycopydescr1 = FakeCallDescr(FakeEffectinfo.EF_CANNOT_RAISE, FakeEffectinfo.OS_ARRAYCOPY, write_descrs_arrays=[descr1]) diff --git a/rpython/jit/metainterp/test/test_recursive.py b/rpython/jit/metainterp/test/test_recursive.py index 39f1a2b643..42cc1afa22 100644 --- a/rpython/jit/metainterp/test/test_recursive.py +++ b/rpython/jit/metainterp/test/test_recursive.py @@ -729,7 +729,7 @@ class RecursiveTests: if codeno == 2: try: portal(1) - except MyException, me: + except MyException as me: i += me.x i += 1 if codeno == 1: @@ -1092,7 +1092,7 @@ class RecursiveTests: if codeno < 10: try: portal(codeno + 5, k+1) - except GotValue, e: + except GotValue as e: i += e.result codeno += 1 elif codeno == 10: @@ -1106,7 +1106,7 @@ class RecursiveTests: def main(codeno, k): try: portal(codeno, k) - except GotValue, e: + except GotValue as e: return e.result assert main(0, 1) == 2095 diff --git a/rpython/jit/metainterp/test/test_virtualizable.py b/rpython/jit/metainterp/test/test_virtualizable.py index 5b9d0d6254..10ec8395b8 100644 --- a/rpython/jit/metainterp/test/test_virtualizable.py +++ b/rpython/jit/metainterp/test/test_virtualizable.py @@ -665,7 +665,7 @@ class ImplicitVirtualizableTests(object): jitdriver.jit_merge_point(frame=frame) try: g() - except FooError, e: + except FooError as e: frame.x -= e.value frame.y += 1 return frame.x diff --git a/rpython/jit/metainterp/test/test_warmspot.py b/rpython/jit/metainterp/test/test_warmspot.py index dfcc99f609..9f4311b474 100644 --- a/rpython/jit/metainterp/test/test_warmspot.py +++ b/rpython/jit/metainterp/test/test_warmspot.py @@ -45,7 +45,7 @@ class TestLLWarmspot(LLJitMixin): def main(a): try: interpreter_loop(a) - except Exit, e: + except Exit as e: return e.result res = self.meta_interp(main, [1]) @@ -624,7 +624,7 @@ class TestWarmspotDirect(object): pass def setup_descrs(self): - pass + return [] def get_latest_descr(self, deadframe): assert isinstance(deadframe, FakeDeadFrame) @@ -674,7 +674,7 @@ class TestWarmspotDirect(object): assert jd._assembler_call_helper(FakeDeadFrame(1), 0) == 10 try: jd._assembler_call_helper(FakeDeadFrame(3), 0) - except LLException, lle: + except LLException as lle: assert lle[0] == self.exc_vtable else: py.test.fail("DID NOT RAISE") diff --git a/rpython/jit/metainterp/warmspot.py 
b/rpython/jit/metainterp/warmspot.py index c1255b751d..6d65333621 100644 --- a/rpython/jit/metainterp/warmspot.py +++ b/rpython/jit/metainterp/warmspot.py @@ -82,7 +82,7 @@ def jittify_and_run(interp, graph, args, repeat=1, graph_and_interp_only=False, backendopt=False, trace_limit=sys.maxint, inline=False, loop_longevity=0, retrace_limit=5, function_threshold=4, disable_unrolling=sys.maxint, - enable_opts=ALL_OPTS_NAMES, max_retrace_guards=15, + enable_opts=ALL_OPTS_NAMES, max_retrace_guards=15, max_unroll_recursion=7, vec=1, vec_all=0, vec_cost=0, vec_length=60, vec_ratio=2, vec_guard_ratio=3, **kwds): from rpython.config.config import ConfigError @@ -277,6 +277,7 @@ class WarmRunnerDesc(object): for vinfo in vinfos: if vinfo is not None: vinfo.finish() + self.metainterp_sd.finish_setup_descrs() if self.cpu.translate_support_code: self.annhelper.finish() @@ -487,7 +488,7 @@ class WarmRunnerDesc(object): if opencoder_model == 'big': self.metainterp_sd.opencoder_model = BigModel else: - self.metainterp_sd.opencoder_model = Model + self.metainterp_sd.opencoder_model = Model self.stats.metainterp_sd = self.metainterp_sd def make_virtualizable_infos(self): @@ -541,7 +542,7 @@ class WarmRunnerDesc(object): raise # go through except StackOverflow: raise # go through - except Exception, e: + except Exception as e: if not we_are_translated(): print "~~~ Crash in JIT!" print '~~~ %s: %s' % (e.__class__, e) @@ -906,7 +907,7 @@ class WarmRunnerDesc(object): # want to interrupt the whole interpreter loop. return support.maybe_on_top_of_llinterp(rtyper, portal_ptr)(*args) - except jitexc.ContinueRunningNormally, e: + except jitexc.ContinueRunningNormally as e: args = () for ARGTYPE, attrname, count in portalfunc_ARGS: x = getattr(e, attrname)[count] @@ -917,28 +918,28 @@ class WarmRunnerDesc(object): except jitexc.DoneWithThisFrameVoid: assert result_kind == 'void' return - except jitexc.DoneWithThisFrameInt, e: + except jitexc.DoneWithThisFrameInt as e: assert result_kind == 'int' return specialize_value(RESULT, e.result) - except jitexc.DoneWithThisFrameRef, e: + except jitexc.DoneWithThisFrameRef as e: assert result_kind == 'ref' return specialize_value(RESULT, e.result) - except jitexc.DoneWithThisFrameFloat, e: + except jitexc.DoneWithThisFrameFloat as e: assert result_kind == 'float' return specialize_value(RESULT, e.result) - except jitexc.ExitFrameWithExceptionRef, e: + except jitexc.ExitFrameWithExceptionRef as e: value = ts.cast_to_baseclass(e.value) if not we_are_translated(): raise LLException(ts.get_typeptr(value), value) else: value = cast_base_ptr_to_instance(Exception, value) - raise Exception, value + raise value def handle_jitexception(e): # XXX the bulk of this function is mostly a copy-paste from above try: raise e - except jitexc.ContinueRunningNormally, e: + except jitexc.ContinueRunningNormally as e: args = () for ARGTYPE, attrname, count in portalfunc_ARGS: x = getattr(e, attrname)[count] @@ -951,22 +952,22 @@ class WarmRunnerDesc(object): except jitexc.DoneWithThisFrameVoid: assert result_kind == 'void' return - except jitexc.DoneWithThisFrameInt, e: + except jitexc.DoneWithThisFrameInt as e: assert result_kind == 'int' return e.result - except jitexc.DoneWithThisFrameRef, e: + except jitexc.DoneWithThisFrameRef as e: assert result_kind == 'ref' return e.result - except jitexc.DoneWithThisFrameFloat, e: + except jitexc.DoneWithThisFrameFloat as e: assert result_kind == 'float' return e.result - except jitexc.ExitFrameWithExceptionRef, e: + except 
jitexc.ExitFrameWithExceptionRef as e: value = ts.cast_to_baseclass(e.value) if not we_are_translated(): raise LLException(ts.get_typeptr(value), value) else: value = cast_base_ptr_to_instance(Exception, value) - raise Exception, value + raise value jd._ll_portal_runner = ll_portal_runner # for debugging jd.portal_runner_ptr = self.helper_func(jd._PTR_PORTAL_FUNCTYPE, @@ -984,7 +985,7 @@ class WarmRunnerDesc(object): fail_descr = self.cpu.get_latest_descr(deadframe) try: fail_descr.handle_fail(deadframe, self.metainterp_sd, jd) - except jitexc.JitException, e: + except jitexc.JitException as e: return handle_jitexception(e) else: assert 0, "should have raised" diff --git a/rpython/jit/tl/test/test_pypyjit.py b/rpython/jit/tl/test/test_pypyjit.py index 6fd4c63563..a496d4f6c8 100644 --- a/rpython/jit/tl/test/test_pypyjit.py +++ b/rpython/jit/tl/test/test_pypyjit.py @@ -21,7 +21,7 @@ def teardown_module(mod): def check_crasher(func_name): try: JIT_EXECUTABLE.sysexec(CRASH_FILE, func_name) - except py.process.cmdexec.Error, e: + except py.process.cmdexec.Error as e: print "stderr" print "------" print e.err diff --git a/rpython/memory/gc/base.py b/rpython/memory/gc/base.py index f6718e0463..01ef82103e 100644 --- a/rpython/memory/gc/base.py +++ b/rpython/memory/gc/base.py @@ -6,6 +6,7 @@ from rpython.memory.support import DEFAULT_CHUNK_SIZE from rpython.memory.support import get_address_stack, get_address_deque from rpython.memory.support import AddressDict, null_address_dict from rpython.rtyper.lltypesystem.llmemory import NULL, raw_malloc_usage +from rpython.rtyper.annlowlevel import cast_adr_to_nongc_instance TYPEID_MAP = lltype.GcStruct('TYPEID_MAP', ('count', lltype.Signed), ('size', lltype.Signed), @@ -36,8 +37,15 @@ class GCBase(object): def setup(self): # all runtime mutable values' setup should happen here # and in its overriden versions! for the benefit of test_transformed_gc - self.finalizer_lock_count = 0 - self.run_finalizers = self.AddressDeque() + self.finalizer_lock = False + self.run_old_style_finalizers = self.AddressDeque() + + def mark_finalizer_to_run(self, fq_index, obj): + if fq_index == -1: # backward compatibility with old-style finalizer + self.run_old_style_finalizers.append(obj) + return + handlers = self.finalizer_handlers() + self._adr2deque(handlers[fq_index].deque).append(obj) def post_setup(self): # More stuff that needs to be initialized when the GC is already @@ -60,8 +68,9 @@ class GCBase(object): def set_query_functions(self, is_varsize, has_gcptr_in_varsize, is_gcarrayofgcptr, - getfinalizer, - getlightfinalizer, + finalizer_handlers, + destructor_or_custom_trace, + is_old_style_finalizer, offsets_to_gc_pointers, fixed_size, varsize_item_sizes, varsize_offset_to_variable_part, @@ -74,8 +83,9 @@ class GCBase(object): fast_path_tracing, has_gcptr, cannot_pin): - self.getfinalizer = getfinalizer - self.getlightfinalizer = getlightfinalizer + self.finalizer_handlers = finalizer_handlers + self.destructor_or_custom_trace = destructor_or_custom_trace + self.is_old_style_finalizer = is_old_style_finalizer self.is_varsize = is_varsize self.has_gcptr_in_varsize = has_gcptr_in_varsize self.is_gcarrayofgcptr = is_gcarrayofgcptr @@ -136,8 +146,10 @@ class GCBase(object): the four malloc_[fixed,var]size[_clear]() functions. 
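
The new GCBase.mark_finalizer_to_run dispatches on the queue index: -1 means an old-style __del__ finalizer and goes to run_old_style_finalizers, while any other index selects the deque of the matching finalizer-queue handler. A rough plain-Python model of that dispatch (lists stand in for AddressDeques; the ToyGC class and its attribute names are illustrative, not the real GC):

class ToyGC(object):
    def __init__(self, n_queues):
        self.run_old_style_finalizers = []             # fq_index == -1
        self.handler_deques = [[] for _ in range(n_queues)]

    def mark_finalizer_to_run(self, fq_index, obj):
        if fq_index == -1:
            # backward compatibility with old-style __del__ finalizers
            self.run_old_style_finalizers.append(obj)
        else:
            # one deque per registered FinalizerQueue
            self.handler_deques[fq_index].append(obj)

gc = ToyGC(2)
gc.mark_finalizer_to_run(-1, 'legacy_obj')
gc.mark_finalizer_to_run(1, 'queued_obj')
assert gc.handler_deques[1] == ['queued_obj']
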
""" size = self.fixed_size(typeid) - needs_finalizer = bool(self.getfinalizer(typeid)) - finalizer_is_light = bool(self.getlightfinalizer(typeid)) + needs_finalizer = (bool(self.destructor_or_custom_trace(typeid)) + and not self.has_custom_trace(typeid)) + finalizer_is_light = (needs_finalizer and + not self.is_old_style_finalizer(typeid)) contains_weakptr = self.weakpointer_offset(typeid) >= 0 assert not (needs_finalizer and contains_weakptr) if self.is_varsize(typeid): @@ -323,9 +335,44 @@ class GCBase(object): callback2, attrname = _convert_callback_formats(callback) # :-/ setattr(self, attrname, arg) self.root_walker.walk_roots(callback2, callback2, callback2) - self.run_finalizers.foreach(callback, arg) + self.enum_pending_finalizers(callback, arg) enumerate_all_roots._annspecialcase_ = 'specialize:arg(1)' + def enum_pending_finalizers(self, callback, arg): + self.run_old_style_finalizers.foreach(callback, arg) + handlers = self.finalizer_handlers() + i = 0 + while i < len(handlers): + self._adr2deque(handlers[i].deque).foreach(callback, arg) + i += 1 + enum_pending_finalizers._annspecialcase_ = 'specialize:arg(1)' + + def _copy_pending_finalizers_deque(self, deque, copy_fn): + tmp = self.AddressDeque() + while deque.non_empty(): + obj = deque.popleft() + tmp.append(copy_fn(obj)) + while tmp.non_empty(): + deque.append(tmp.popleft()) + tmp.delete() + + def copy_pending_finalizers(self, copy_fn): + "NOTE: not very efficient, but only for SemiSpaceGC and subclasses" + self._copy_pending_finalizers_deque( + self.run_old_style_finalizers, copy_fn) + handlers = self.finalizer_handlers() + i = 0 + while i < len(handlers): + h = handlers[i] + self._copy_pending_finalizers_deque( + self._adr2deque(h.deque), copy_fn) + i += 1 + + def call_destructor(self, obj): + destructor = self.destructor_or_custom_trace(self.get_type_id(obj)) + ll_assert(bool(destructor), "no destructor found") + destructor(obj) + def debug_check_consistency(self): """To use after a collection. 
If self.DEBUG is set, this enumerates all roots and traces all objects to check if we didn't @@ -364,18 +411,25 @@ class GCBase(object): def debug_check_object(self, obj): pass + def _adr2deque(self, adr): + return cast_adr_to_nongc_instance(self.AddressDeque, adr) + def execute_finalizers(self): - self.finalizer_lock_count += 1 + if self.finalizer_lock: + return # the outer invocation of execute_finalizers() will do it + self.finalizer_lock = True try: - while self.run_finalizers.non_empty(): - if self.finalizer_lock_count > 1: - # the outer invocation of execute_finalizers() will do it - break - obj = self.run_finalizers.popleft() - finalizer = self.getfinalizer(self.get_type_id(obj)) - finalizer(obj) + handlers = self.finalizer_handlers() + i = 0 + while i < len(handlers): + if self._adr2deque(handlers[i].deque).non_empty(): + handlers[i].trigger() + i += 1 + while self.run_old_style_finalizers.non_empty(): + obj = self.run_old_style_finalizers.popleft() + self.call_destructor(obj) finally: - self.finalizer_lock_count -= 1 + self.finalizer_lock = False class MovingGCBase(GCBase): diff --git a/rpython/memory/gc/generation.py b/rpython/memory/gc/generation.py index 6ba79667f8..18263061bc 100644 --- a/rpython/memory/gc/generation.py +++ b/rpython/memory/gc/generation.py @@ -355,6 +355,7 @@ class GenerationGC(SemiSpaceGC): scan = beginning = self.free self.collect_oldrefs_to_nursery() self.collect_roots_in_nursery() + self.collect_young_objects_with_finalizers() scan = self.scan_objects_just_copied_out_of_nursery(scan) # at this point, all static and old objects have got their # GCFLAG_NO_YOUNG_PTRS set again by trace_and_drag_out_of_nursery @@ -422,6 +423,19 @@ class GenerationGC(SemiSpaceGC): if self.is_in_nursery(obj): root.address[0] = self.copy(obj) + def collect_young_objects_with_finalizers(self): + # XXX always walk the whole 'objects_with_finalizers' list here + new = self.AddressDeque() + while self.objects_with_finalizers.non_empty(): + obj = self.objects_with_finalizers.popleft() + fq_nr = self.objects_with_finalizers.popleft() + if self.is_in_nursery(obj): + obj = self.copy(obj) + new.append(obj) + new.append(fq_nr) + self.objects_with_finalizers.delete() + self.objects_with_finalizers = new + def scan_objects_just_copied_out_of_nursery(self, scan): while scan < self.free: curr = scan + self.size_gc_header() diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py index 349dc96290..09f932df17 100644 --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -372,10 +372,19 @@ class IncrementalMiniMarkGC(MovingGCBase): self.gc_state = STATE_SCANNING # - # A list of all objects with finalizers (these are never young). - self.objects_with_finalizers = self.AddressDeque() - self.young_objects_with_light_finalizers = self.AddressStack() - self.old_objects_with_light_finalizers = self.AddressStack() + # Two lists of all objects with finalizers. Actually they are lists + # of pairs (finalization_queue_nr, object). "probably young objects" + # are all traced and moved to the "old" list by the next minor + # collection. + self.probably_young_objects_with_finalizers = self.AddressDeque() + self.old_objects_with_finalizers = self.AddressDeque() + p = lltype.malloc(self._ADDRARRAY, 1, flavor='raw', + track_allocation=False) + self.singleaddr = llmemory.cast_ptr_to_adr(p) + # + # Two lists of all objects with destructors. 
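
execute_finalizers now uses a simple boolean lock instead of a counter: a nested call (from inside a finalizer) returns immediately and the outer invocation finishes the work. It first fires the trigger() of every finalizer queue whose deque is non-empty, then runs the remaining old-style finalizers directly through call_destructor. A compact plain-Python model of that control flow (the state dictionary, triggers and deques below are stand-ins, not the real GC structures):

def execute_finalizers(state):
    if state['finalizer_lock']:
        return                      # the outer invocation will do it
    state['finalizer_lock'] = True
    try:
        for dq, trigger in state['handlers']:
            if dq:                  # non_empty()
                trigger()           # user code drains the queue via next_dead()
        while state['old_style']:
            obj = state['old_style'].pop(0)
            state['call_destructor'](obj)
    finally:
        state['finalizer_lock'] = False

fired = []
state = {'finalizer_lock': False,
         'handlers': [([1], lambda: fired.append('trigger'))],
         'old_style': [2],
         'call_destructor': lambda obj: fired.append(('destr', obj))}
execute_finalizers(state)
assert fired == ['trigger', ('destr', 2)]
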
+ self.young_objects_with_destructors = self.AddressStack() + self.old_objects_with_destructors = self.AddressStack() # # Two lists of the objects with weakrefs. No weakref can be an # old object weakly pointing to a young object: indeed, weakrefs @@ -559,14 +568,14 @@ class IncrementalMiniMarkGC(MovingGCBase): # set up extra stuff for PYPY_GC_DEBUG. MovingGCBase.post_setup(self) if self.DEBUG and llarena.has_protect: - # gc debug mode: allocate 23 nurseries instead of just 1, + # gc debug mode: allocate 7 nurseries instead of just 1, # and use them alternatively, while mprotect()ing the unused # ones to detect invalid access. debug_start("gc-debug") self.debug_rotating_nurseries = lltype.malloc( - NURSARRAY, 22, flavor='raw', track_allocation=False) + NURSARRAY, 6, flavor='raw', track_allocation=False) i = 0 - while i < 22: + while i < 6: nurs = self._alloc_nursery() llarena.arena_protect(nurs, self._nursery_memory_size(), True) self.debug_rotating_nurseries[i] = nurs @@ -609,15 +618,18 @@ class IncrementalMiniMarkGC(MovingGCBase): # If the object needs a finalizer, ask for a rawmalloc. # The following check should be constant-folded. if needs_finalizer and not is_finalizer_light: + # old-style finalizers only! ll_assert(not contains_weakptr, "'needs_finalizer' and 'contains_weakptr' both specified") obj = self.external_malloc(typeid, 0, alloc_young=False) - self.objects_with_finalizers.append(obj) + res = llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) + self.register_finalizer(-1, res) + return res # # If totalsize is greater than nonlarge_max (which should never be # the case in practice), ask for a rawmalloc. The following check # should be constant-folded. - elif rawtotalsize > self.nonlarge_max: + if rawtotalsize > self.nonlarge_max: ll_assert(not contains_weakptr, "'contains_weakptr' specified for a large object") obj = self.external_malloc(typeid, 0, alloc_young=True) @@ -639,14 +651,14 @@ class IncrementalMiniMarkGC(MovingGCBase): # Build the object. llarena.arena_reserve(result, totalsize) obj = result + size_gc_header - if is_finalizer_light: - self.young_objects_with_light_finalizers.append(obj) self.init_gc_object(result, typeid, flags=0) - # - # If it is a weakref, record it (check constant-folded). - if contains_weakptr: - self.young_objects_with_weakrefs.append(obj) # + # If it is a weakref or has a lightweight destructor, record it + # (checks constant-folded). 
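
The allocation fast path in malloc_fixedsize now distinguishes three flavours of "finalizer": an old-style __del__ goes through external_malloc plus register_finalizer(-1); a lightweight destructor stays on the ordinary nursery path and is only recorded in young_objects_with_destructors; weakrefs are recorded as before. A rough plain-Python rendering of the branch structure only (not the real allocator; the return strings are just labels):

def choose_allocation(needs_finalizer, finalizer_is_light,
                      contains_weakptr, rawtotalsize, nonlarge_max):
    if needs_finalizer and not finalizer_is_light:
        # old-style finalizers only: allocate outside the nursery and
        # register the object with queue index -1
        return 'external_malloc + register_finalizer(-1)'
    if rawtotalsize > nonlarge_max:
        return 'external_malloc (large object)'
    tags = ['nursery']
    if needs_finalizer:
        tags.append('young_objects_with_destructors')
    if contains_weakptr:
        tags.append('young_objects_with_weakrefs')
    return ' + '.join(tags)

assert choose_allocation(True, False, False, 64, 1000).startswith('external')
assert choose_allocation(True, True, False, 64, 1000) == \
       'nursery + young_objects_with_destructors'
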
+ if needs_finalizer: + self.young_objects_with_destructors.append(obj) + if contains_weakptr: + self.young_objects_with_weakrefs.append(obj) return llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) @@ -850,6 +862,7 @@ class IncrementalMiniMarkGC(MovingGCBase): collect_and_reserve._dont_inline_ = True + # XXX kill alloc_young and make it always True def external_malloc(self, typeid, length, alloc_young): """Allocate a large object using the ArenaCollection or raw_malloc(), possibly as an object with card marking enabled, @@ -1565,6 +1578,13 @@ class IncrementalMiniMarkGC(MovingGCBase): self.header(shadow).tid |= GCFLAG_VISITED new_shadow_object_dict.setitem(obj, shadow) + def register_finalizer(self, fq_index, gcobj): + from rpython.rtyper.lltypesystem import rffi + obj = llmemory.cast_ptr_to_adr(gcobj) + fq_index = rffi.cast(llmemory.Address, fq_index) + self.probably_young_objects_with_finalizers.append(obj) + self.probably_young_objects_with_finalizers.append(fq_index) + # ---------- # Nursery collection @@ -1632,6 +1652,11 @@ class IncrementalMiniMarkGC(MovingGCBase): if self.rrc_enabled: self.rrc_minor_collection_trace() # + # visit the "probably young" objects with finalizers. They + # always all survive. + if self.probably_young_objects_with_finalizers.non_empty(): + self.deal_with_young_objects_with_finalizers() + # while True: # If we are using card marking, do a partial trace of the arrays # that are flagged with GCFLAG_CARDS_SET. @@ -1657,8 +1682,8 @@ class IncrementalMiniMarkGC(MovingGCBase): # weakrefs' targets. if self.young_objects_with_weakrefs.non_empty(): self.invalidate_young_weakrefs() - if self.young_objects_with_light_finalizers.non_empty(): - self.deal_with_young_objects_with_finalizers() + if self.young_objects_with_destructors.non_empty(): + self.deal_with_young_objects_with_destructors() # # Clear this mapping. 
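
register_finalizer stores its two values in a single AddressDeque by appending them as consecutive entries: first the object, then the queue index cast to an address (rffi.cast(llmemory.Address, fq_index) on the way in, rffi.cast(lltype.Signed, ...) on the way out). A plain-Python model of the pairing, using collections.deque and plain ints instead of addresses:

from collections import deque

pending = deque()

def register_finalizer(fq_index, obj):
    # the real code casts fq_index to an Address so both entries fit
    # in the same AddressDeque; here they are kept as-is
    pending.append(obj)
    pending.append(fq_index)

def drain_pairs():
    while pending:
        obj = pending.popleft()
        fq_index = pending.popleft()
        yield obj, fq_index

register_finalizer(0, 'a')
register_finalizer(-1, 'b')
assert list(drain_pairs()) == [('a', 0), ('b', -1)]
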
Without pinned objects we just clear the dict # as all objects in the nursery are dragged out of the nursery and, if @@ -1706,7 +1731,6 @@ class IncrementalMiniMarkGC(MovingGCBase): llarena.arena_reset(prev, pinned_obj_size, 3) else: llarena.arena_reset(prev, pinned_obj_size, 0) - # XXX: debug_rotate_nursery missing here # # clean up object's flags obj = cur + size_gc_header @@ -1722,6 +1746,8 @@ class IncrementalMiniMarkGC(MovingGCBase): # reset everything after the last pinned object till the end of the arena if self.gc_nursery_debug: llarena.arena_reset(prev, self.nursery + self.nursery_size - prev, 3) + if not nursery_barriers.non_empty(): # no pinned objects + self.debug_rotate_nursery() else: llarena.arena_reset(prev, self.nursery + self.nursery_size - prev, 0) # @@ -1731,7 +1757,6 @@ class IncrementalMiniMarkGC(MovingGCBase): self.nursery_barriers = nursery_barriers self.surviving_pinned_objects.delete() # - # XXX gc-minimark-pinning does a debug_rotate_nursery() here (groggi) self.nursery_free = self.nursery self.nursery_top = self.nursery_barriers.popleft() # @@ -2220,7 +2245,10 @@ class IncrementalMiniMarkGC(MovingGCBase): if self.rrc_enabled: self.rrc_major_collection_trace() # - if self.objects_with_finalizers.non_empty(): + ll_assert(not (self.probably_young_objects_with_finalizers + .non_empty()), + "probably_young_objects_with_finalizers should be empty") + if self.old_objects_with_finalizers.non_empty(): self.deal_with_objects_with_finalizers() elif self.old_objects_with_weakrefs.non_empty(): # Weakref support: clear the weak pointers to dying objects @@ -2236,9 +2264,9 @@ class IncrementalMiniMarkGC(MovingGCBase): self.more_objects_to_trace.delete() # - # Light finalizers - if self.old_objects_with_light_finalizers.non_empty(): - self.deal_with_old_objects_with_finalizers() + # Destructors + if self.old_objects_with_destructors.non_empty(): + self.deal_with_old_objects_with_destructors() # objects_to_trace processed fully, can move on to sweeping self.ac.mass_free_prepare() self.start_free_rawmalloc_objects() @@ -2407,7 +2435,7 @@ class IncrementalMiniMarkGC(MovingGCBase): # # If we are in an inner collection caused by a call to a finalizer, # the 'run_finalizers' objects also need to be kept alive. - self.run_finalizers.foreach(self._collect_obj, None) + self.enum_pending_finalizers(self._collect_obj, None) def enumerate_all_roots(self, callback, arg): self.prebuilt_root_objects.foreach(callback, arg) @@ -2572,41 +2600,45 @@ class IncrementalMiniMarkGC(MovingGCBase): # ---------- # Finalizers - def deal_with_young_objects_with_finalizers(self): - """ This is a much simpler version of dealing with finalizers - and an optimization - we can reasonably assume that those finalizers - don't do anything fancy and *just* call them. Among other things + def deal_with_young_objects_with_destructors(self): + """We can reasonably assume that destructors don't do + anything fancy and *just* call them. 
Among other things they won't resurrect objects """ - while self.young_objects_with_light_finalizers.non_empty(): - obj = self.young_objects_with_light_finalizers.pop() + while self.young_objects_with_destructors.non_empty(): + obj = self.young_objects_with_destructors.pop() if not self.is_forwarded(obj): - finalizer = self.getlightfinalizer(self.get_type_id(obj)) - ll_assert(bool(finalizer), "no light finalizer found") - finalizer(obj) + self.call_destructor(obj) else: obj = self.get_forwarding_address(obj) - self.old_objects_with_light_finalizers.append(obj) + self.old_objects_with_destructors.append(obj) - def deal_with_old_objects_with_finalizers(self): - """ This is a much simpler version of dealing with finalizers - and an optimization - we can reasonably assume that those finalizers - don't do anything fancy and *just* call them. Among other things + def deal_with_old_objects_with_destructors(self): + """We can reasonably assume that destructors don't do + anything fancy and *just* call them. Among other things they won't resurrect objects """ new_objects = self.AddressStack() - while self.old_objects_with_light_finalizers.non_empty(): - obj = self.old_objects_with_light_finalizers.pop() + while self.old_objects_with_destructors.non_empty(): + obj = self.old_objects_with_destructors.pop() if self.header(obj).tid & GCFLAG_VISITED: # surviving new_objects.append(obj) else: # dying - finalizer = self.getlightfinalizer(self.get_type_id(obj)) - ll_assert(bool(finalizer), "no light finalizer found") - finalizer(obj) - self.old_objects_with_light_finalizers.delete() - self.old_objects_with_light_finalizers = new_objects + self.call_destructor(obj) + self.old_objects_with_destructors.delete() + self.old_objects_with_destructors = new_objects + + def deal_with_young_objects_with_finalizers(self): + while self.probably_young_objects_with_finalizers.non_empty(): + obj = self.probably_young_objects_with_finalizers.popleft() + fq_nr = self.probably_young_objects_with_finalizers.popleft() + self.singleaddr.address[0] = obj + self._trace_drag_out1(self.singleaddr) + obj = self.singleaddr.address[0] + self.old_objects_with_finalizers.append(obj) + self.old_objects_with_finalizers.append(fq_nr) def deal_with_objects_with_finalizers(self): # Walk over list of objects with finalizers. 
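
deal_with_objects_with_finalizers now walks its list two entries at a time, keeping the queue index next to each object: surviving objects are copied back (together with their index) into the new list, while dead ones are eventually handed to mark_finalizer_to_run. The sketch below (plain Python) shows only the outer pair-walk and the survival split; the real code additionally traces the transitive closure of each dead object to preserve finalization order:

from collections import deque

def split_finalizer_list(old_with_finalizers, is_surviving,
                         mark_finalizer_to_run):
    still_registered = deque()
    while old_with_finalizers:
        obj = old_with_finalizers.popleft()
        fq_index = old_with_finalizers.popleft()
        if is_surviving(obj):
            still_registered.append(obj)
            still_registered.append(fq_index)
        else:
            mark_finalizer_to_run(fq_index, obj)
    return still_registered

marked = []
kept = split_finalizer_list(deque(['live', 3, 'dead', -1]),
                            lambda obj: obj == 'live',
                            lambda idx, obj: marked.append((idx, obj)))
assert list(kept) == ['live', 3] and marked == [(-1, 'dead')]
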
@@ -2619,14 +2651,17 @@ class IncrementalMiniMarkGC(MovingGCBase): marked = self.AddressDeque() pending = self.AddressStack() self.tmpstack = self.AddressStack() - while self.objects_with_finalizers.non_empty(): - x = self.objects_with_finalizers.popleft() + while self.old_objects_with_finalizers.non_empty(): + x = self.old_objects_with_finalizers.popleft() + fq_nr = self.old_objects_with_finalizers.popleft() ll_assert(self._finalization_state(x) != 1, "bad finalization state 1") if self.header(x).tid & GCFLAG_VISITED: new_with_finalizer.append(x) + new_with_finalizer.append(fq_nr) continue marked.append(x) + marked.append(fq_nr) pending.append(x) while pending.non_empty(): y = pending.pop() @@ -2646,22 +2681,26 @@ class IncrementalMiniMarkGC(MovingGCBase): while marked.non_empty(): x = marked.popleft() + fq_nr = marked.popleft() state = self._finalization_state(x) ll_assert(state >= 2, "unexpected finalization state < 2") if state == 2: - self.run_finalizers.append(x) + from rpython.rtyper.lltypesystem import rffi + fq_index = rffi.cast(lltype.Signed, fq_nr) + self.mark_finalizer_to_run(fq_index, x) # we must also fix the state from 2 to 3 here, otherwise # we leave the GCFLAG_FINALIZATION_ORDERING bit behind # which will confuse the next collection self._recursively_bump_finalization_state_from_2_to_3(x) else: new_with_finalizer.append(x) + new_with_finalizer.append(fq_nr) self.tmpstack.delete() pending.delete() marked.delete() - self.objects_with_finalizers.delete() - self.objects_with_finalizers = new_with_finalizer + self.old_objects_with_finalizers.delete() + self.old_objects_with_finalizers = new_with_finalizer def _append_if_nonnull(pointer, stack): stack.append(pointer.address[0]) @@ -2814,9 +2853,6 @@ class IncrementalMiniMarkGC(MovingGCBase): self.rrc_o_list_old = self.AddressStack() self.rrc_p_dict = self.AddressDict() # non-nursery keys only self.rrc_p_dict_nurs = self.AddressDict() # nursery keys only - p = lltype.malloc(self._ADDRARRAY, 1, flavor='raw', - track_allocation=False) - self.rrc_singleaddr = llmemory.cast_ptr_to_adr(p) self.rrc_dealloc_trigger_callback = dealloc_trigger_callback self.rrc_dealloc_pending = self.AddressStack() self.rrc_enabled = True @@ -2886,7 +2922,7 @@ class IncrementalMiniMarkGC(MovingGCBase): self.rrc_p_dict_nurs.delete() self.rrc_p_dict_nurs = self.AddressDict(length_estimate) self.rrc_p_list_young.foreach(self._rrc_minor_trace, - self.rrc_singleaddr) + self.singleaddr) def _rrc_minor_trace(self, pyobject, singleaddr): from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY @@ -2899,7 +2935,7 @@ class IncrementalMiniMarkGC(MovingGCBase): # force the corresponding object to be alive intobj = self._pyobj(pyobject).ob_pypy_link singleaddr.address[0] = llmemory.cast_int_to_adr(intobj) - self._trace_drag_out(singleaddr, llmemory.NULL) + self._trace_drag_out1(singleaddr) def rrc_minor_collection_free(self): ll_assert(self.rrc_p_dict_nurs.length() == 0, "p_dict_nurs not empty 1") diff --git a/rpython/memory/gc/minimark.py b/rpython/memory/gc/minimark.py index 969c27cbee..52d7d1138f 100644 --- a/rpython/memory/gc/minimark.py +++ b/rpython/memory/gc/minimark.py @@ -153,6 +153,8 @@ class MiniMarkGC(MovingGCBase): # ^^^ prebuilt objects may have the flag GCFLAG_HAS_SHADOW; # then they are one word longer, the extra word storing the hash. 
+ _ADDRARRAY = lltype.Array(llmemory.Address, hints={'nolength': True}) + # During a minor collection, the objects in the nursery that are # moved outside are changed in-place: their header is replaced with @@ -309,10 +311,19 @@ class MiniMarkGC(MovingGCBase): self.old_rawmalloced_objects = self.AddressStack() self.rawmalloced_total_size = r_uint(0) # - # A list of all objects with finalizers (these are never young). - self.objects_with_finalizers = self.AddressDeque() - self.young_objects_with_light_finalizers = self.AddressStack() - self.old_objects_with_light_finalizers = self.AddressStack() + # Two lists of all objects with finalizers. Actually they are lists + # of pairs (finalization_queue_nr, object). "probably young objects" + # are all traced and moved to the "old" list by the next minor + # collection. + self.probably_young_objects_with_finalizers = self.AddressDeque() + self.old_objects_with_finalizers = self.AddressDeque() + p = lltype.malloc(self._ADDRARRAY, 1, flavor='raw', + track_allocation=False) + self.singleaddr = llmemory.cast_ptr_to_adr(p) + # + # Two lists of all objects with destructors. + self.young_objects_with_destructors = self.AddressStack() + self.old_objects_with_destructors = self.AddressStack() # # Two lists of the objects with weakrefs. No weakref can be an # old object weakly pointing to a young object: indeed, weakrefs @@ -517,15 +528,18 @@ class MiniMarkGC(MovingGCBase): # If the object needs a finalizer, ask for a rawmalloc. # The following check should be constant-folded. if needs_finalizer and not is_finalizer_light: + # old-style finalizers only! ll_assert(not contains_weakptr, "'needs_finalizer' and 'contains_weakptr' both specified") obj = self.external_malloc(typeid, 0, alloc_young=False) - self.objects_with_finalizers.append(obj) + res = llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) + self.register_finalizer(-1, res) + return res # # If totalsize is greater than nonlarge_max (which should never be # the case in practice), ask for a rawmalloc. The following check # should be constant-folded. - elif rawtotalsize > self.nonlarge_max: + if rawtotalsize > self.nonlarge_max: ll_assert(not contains_weakptr, "'contains_weakptr' specified for a large object") obj = self.external_malloc(typeid, 0, alloc_young=True) @@ -547,14 +561,14 @@ class MiniMarkGC(MovingGCBase): # Build the object. llarena.arena_reserve(result, totalsize) obj = result + size_gc_header - if is_finalizer_light: - self.young_objects_with_light_finalizers.append(obj) self.init_gc_object(result, typeid, flags=0) - # - # If it is a weakref, record it (check constant-folded). - if contains_weakptr: - self.young_objects_with_weakrefs.append(obj) # + # If it is a weakref or has a lightweight destructor, record it + # (checks constant-folded). 
+ if needs_finalizer: + self.young_objects_with_destructors.append(obj) + if contains_weakptr: + self.young_objects_with_weakrefs.append(obj) return llmemory.cast_adr_to_ptr(obj, llmemory.GCREF) @@ -676,6 +690,7 @@ class MiniMarkGC(MovingGCBase): collect_and_reserve._dont_inline_ = True + # XXX kill alloc_young and make it always True def external_malloc(self, typeid, length, alloc_young): """Allocate a large object using the ArenaCollection or raw_malloc(), possibly as an object with card marking enabled, @@ -1241,6 +1256,13 @@ class MiniMarkGC(MovingGCBase): self.old_objects_with_cards_set.append(dest_addr) dest_hdr.tid |= GCFLAG_CARDS_SET + def register_finalizer(self, fq_index, gcobj): + from rpython.rtyper.lltypesystem import rffi + obj = llmemory.cast_ptr_to_adr(gcobj) + fq_index = rffi.cast(llmemory.Address, fq_index) + self.probably_young_objects_with_finalizers.append(obj) + self.probably_young_objects_with_finalizers.append(fq_index) + # ---------- # Nursery collection @@ -1264,6 +1286,11 @@ class MiniMarkGC(MovingGCBase): # 'old_objects_pointing_to_young'. self.collect_roots_in_nursery() # + # visit the "probably young" objects with finalizers. They + # always all survive. + if self.probably_young_objects_with_finalizers.non_empty(): + self.deal_with_young_objects_with_finalizers() + # while True: # If we are using card marking, do a partial trace of the arrays # that are flagged with GCFLAG_CARDS_SET. @@ -1288,8 +1315,8 @@ class MiniMarkGC(MovingGCBase): # weakrefs' targets. if self.young_objects_with_weakrefs.non_empty(): self.invalidate_young_weakrefs() - if self.young_objects_with_light_finalizers.non_empty(): - self.deal_with_young_objects_with_finalizers() + if self.young_objects_with_destructors.non_empty(): + self.deal_with_young_objects_with_destructors() # # Clear this mapping. if self.nursery_objects_shadows.length() > 0: @@ -1613,7 +1640,7 @@ class MiniMarkGC(MovingGCBase): # with a finalizer and all objects reachable from there (and also # moves some objects from 'objects_with_finalizers' to # 'run_finalizers'). - if self.objects_with_finalizers.non_empty(): + if self.old_objects_with_finalizers.non_empty(): self.deal_with_objects_with_finalizers() # self.objects_to_trace.delete() @@ -1621,8 +1648,8 @@ class MiniMarkGC(MovingGCBase): # Weakref support: clear the weak pointers to dying objects if self.old_objects_with_weakrefs.non_empty(): self.invalidate_old_weakrefs() - if self.old_objects_with_light_finalizers.non_empty(): - self.deal_with_old_objects_with_finalizers() + if self.old_objects_with_destructors.non_empty(): + self.deal_with_old_objects_with_destructors() # # Walk all rawmalloced objects and free the ones that don't @@ -1745,8 +1772,8 @@ class MiniMarkGC(MovingGCBase): # # If we are in an inner collection caused by a call to a finalizer, # the 'run_finalizers' objects also need to be kept alive. - self.run_finalizers.foreach(self._collect_obj, - self.objects_to_trace) + self.enum_pending_finalizers(self._collect_obj, + self.objects_to_trace) def enumerate_all_roots(self, callback, arg): self.prebuilt_root_objects.foreach(callback, arg) @@ -1878,41 +1905,45 @@ class MiniMarkGC(MovingGCBase): # ---------- # Finalizers - def deal_with_young_objects_with_finalizers(self): - """ This is a much simpler version of dealing with finalizers - and an optimization - we can reasonably assume that those finalizers - don't do anything fancy and *just* call them. 
Among other things + def deal_with_young_objects_with_destructors(self): + """We can reasonably assume that destructors don't do + anything fancy and *just* call them. Among other things they won't resurrect objects """ - while self.young_objects_with_light_finalizers.non_empty(): - obj = self.young_objects_with_light_finalizers.pop() + while self.young_objects_with_destructors.non_empty(): + obj = self.young_objects_with_destructors.pop() if not self.is_forwarded(obj): - finalizer = self.getlightfinalizer(self.get_type_id(obj)) - ll_assert(bool(finalizer), "no light finalizer found") - finalizer(obj) + self.call_destructor(obj) else: obj = self.get_forwarding_address(obj) - self.old_objects_with_light_finalizers.append(obj) + self.old_objects_with_destructors.append(obj) - def deal_with_old_objects_with_finalizers(self): - """ This is a much simpler version of dealing with finalizers - and an optimization - we can reasonably assume that those finalizers - don't do anything fancy and *just* call them. Among other things + def deal_with_old_objects_with_destructors(self): + """We can reasonably assume that destructors don't do + anything fancy and *just* call them. Among other things they won't resurrect objects """ new_objects = self.AddressStack() - while self.old_objects_with_light_finalizers.non_empty(): - obj = self.old_objects_with_light_finalizers.pop() + while self.old_objects_with_destructors.non_empty(): + obj = self.old_objects_with_destructors.pop() if self.header(obj).tid & GCFLAG_VISITED: # surviving new_objects.append(obj) else: # dying - finalizer = self.getlightfinalizer(self.get_type_id(obj)) - ll_assert(bool(finalizer), "no light finalizer found") - finalizer(obj) - self.old_objects_with_light_finalizers.delete() - self.old_objects_with_light_finalizers = new_objects + self.call_destructor(obj) + self.old_objects_with_destructors.delete() + self.old_objects_with_destructors = new_objects + + def deal_with_young_objects_with_finalizers(self): + while self.probably_young_objects_with_finalizers.non_empty(): + obj = self.probably_young_objects_with_finalizers.popleft() + fq_nr = self.probably_young_objects_with_finalizers.popleft() + self.singleaddr.address[0] = obj + self._trace_drag_out1(self.singleaddr) + obj = self.singleaddr.address[0] + self.old_objects_with_finalizers.append(obj) + self.old_objects_with_finalizers.append(fq_nr) def deal_with_objects_with_finalizers(self): # Walk over list of objects with finalizers. 
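
Both MiniMark flavours use the new self.singleaddr scratch buffer (a raw one-element array of addresses) to force a "probably young" finalizer object out of the nursery: the address is written into the slot, _trace_drag_out1 updates the slot in place exactly as it would update a root, and the possibly moved address is read back. A plain-Python model of that "update a single-slot root in place" idiom (the helper names and lambdas are hypothetical, not the real tracing code):

def trace_drag_out1(slot, copy_out_of_nursery, is_in_nursery):
    # mimics the GC's generic root-updating trace: the slot is
    # rewritten in place when the object has to be moved
    obj = slot[0]
    if is_in_nursery(obj):
        slot[0] = copy_out_of_nursery(obj)

singleaddr = [None]                 # stands in for the raw one-slot address array
singleaddr[0] = 'young_obj'
trace_drag_out1(singleaddr,
                copy_out_of_nursery=lambda obj: obj + '_moved',
                is_in_nursery=lambda obj: obj.endswith('obj'))
assert singleaddr[0] == 'young_obj_moved'
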
@@ -1925,14 +1956,17 @@ class MiniMarkGC(MovingGCBase): marked = self.AddressDeque() pending = self.AddressStack() self.tmpstack = self.AddressStack() - while self.objects_with_finalizers.non_empty(): - x = self.objects_with_finalizers.popleft() + while self.old_objects_with_finalizers.non_empty(): + x = self.old_objects_with_finalizers.popleft() + fq_nr = self.old_objects_with_finalizers.popleft() ll_assert(self._finalization_state(x) != 1, "bad finalization state 1") if self.header(x).tid & GCFLAG_VISITED: new_with_finalizer.append(x) + new_with_finalizer.append(fq_nr) continue marked.append(x) + marked.append(fq_nr) pending.append(x) while pending.non_empty(): y = pending.pop() @@ -1946,22 +1980,26 @@ class MiniMarkGC(MovingGCBase): while marked.non_empty(): x = marked.popleft() + fq_nr = marked.popleft() state = self._finalization_state(x) ll_assert(state >= 2, "unexpected finalization state < 2") if state == 2: - self.run_finalizers.append(x) + from rpython.rtyper.lltypesystem import rffi + fq_index = rffi.cast(lltype.Signed, fq_nr) + self.mark_finalizer_to_run(fq_index, x) # we must also fix the state from 2 to 3 here, otherwise # we leave the GCFLAG_FINALIZATION_ORDERING bit behind # which will confuse the next collection self._recursively_bump_finalization_state_from_2_to_3(x) else: new_with_finalizer.append(x) + new_with_finalizer.append(fq_nr) self.tmpstack.delete() pending.delete() marked.delete() - self.objects_with_finalizers.delete() - self.objects_with_finalizers = new_with_finalizer + self.old_objects_with_finalizers.delete() + self.old_objects_with_finalizers = new_with_finalizer def _append_if_nonnull(pointer, stack): stack.append(pointer.address[0]) diff --git a/rpython/memory/gc/semispace.py b/rpython/memory/gc/semispace.py index 7d9d10a73f..1283d4fc66 100644 --- a/rpython/memory/gc/semispace.py +++ b/rpython/memory/gc/semispace.py @@ -111,7 +111,9 @@ class SemiSpaceGC(MovingGCBase): # self.objects_with_light_finalizers.append(result + size_gc_header) #else: if has_finalizer: + from rpython.rtyper.lltypesystem import rffi self.objects_with_finalizers.append(result + size_gc_header) + self.objects_with_finalizers.append(rffi.cast(llmemory.Address, -1)) if contains_weakptr: self.objects_with_weakrefs.append(result + size_gc_header) return llmemory.cast_adr_to_ptr(result+size_gc_header, llmemory.GCREF) @@ -149,6 +151,13 @@ class SemiSpaceGC(MovingGCBase): else: return False + def register_finalizer(self, fq_index, gcobj): + from rpython.rtyper.lltypesystem import rffi + obj = llmemory.cast_ptr_to_adr(gcobj) + fq_index = rffi.cast(llmemory.Address, fq_index) + self.objects_with_finalizers.append(obj) + self.objects_with_finalizers.append(fq_index) + def obtain_free_space(self, needed): # a bit of tweaking to maximize the performance and minimize the # amount of code in an inlined version of malloc_fixedsize_clear() @@ -268,8 +277,7 @@ class SemiSpaceGC(MovingGCBase): scan = self.free = tospace self.starting_full_collect() self.collect_roots() - if self.run_finalizers.non_empty(): - self.update_run_finalizers() + self.copy_pending_finalizers(self.copy) scan = self.scan_copied(scan) if self.objects_with_light_finalizers.non_empty(): self.deal_with_objects_with_light_finalizers() @@ -499,8 +507,7 @@ class SemiSpaceGC(MovingGCBase): if self.surviving(obj): new_objects.append(self.get_forwarding_address(obj)) else: - finalizer = self.getfinalizer(self.get_type_id(obj)) - finalizer(obj) + self.call_destructor(obj) self.objects_with_light_finalizers.delete() 
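
For the copying collectors (SemiSpaceGC and its subclasses), the old update_run_finalizers is replaced by copy_pending_finalizers, which drains each pending-finalizer deque through a temporary one while applying the copy function, so every queued address ends up pointing at the object's new location. A plain-Python model of the drain-and-copy helper (deque contents and copy_fn are stand-ins):

from collections import deque

def copy_pending_finalizers_deque(dq, copy_fn):
    tmp = deque()
    while dq:
        tmp.append(copy_fn(dq.popleft()))
    while tmp:                      # refill in the original order
        dq.append(tmp.popleft())

dq = deque(['a', 'b'])
copy_pending_finalizers_deque(dq, lambda obj: obj + '_tospace')
assert list(dq) == ['a_tospace', 'b_tospace']
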
self.objects_with_light_finalizers = new_objects @@ -517,12 +524,15 @@ class SemiSpaceGC(MovingGCBase): self.tmpstack = self.AddressStack() while self.objects_with_finalizers.non_empty(): x = self.objects_with_finalizers.popleft() + fq_nr = self.objects_with_finalizers.popleft() ll_assert(self._finalization_state(x) != 1, "bad finalization state 1") if self.surviving(x): new_with_finalizer.append(self.get_forwarding_address(x)) + new_with_finalizer.append(fq_nr) continue marked.append(x) + marked.append(fq_nr) pending.append(x) while pending.non_empty(): y = pending.pop() @@ -537,17 +547,21 @@ class SemiSpaceGC(MovingGCBase): while marked.non_empty(): x = marked.popleft() + fq_nr = marked.popleft() state = self._finalization_state(x) ll_assert(state >= 2, "unexpected finalization state < 2") newx = self.get_forwarding_address(x) if state == 2: - self.run_finalizers.append(newx) + from rpython.rtyper.lltypesystem import rffi + fq_index = rffi.cast(lltype.Signed, fq_nr) + self.mark_finalizer_to_run(fq_index, newx) # we must also fix the state from 2 to 3 here, otherwise # we leave the GCFLAG_FINALIZATION_ORDERING bit behind # which will confuse the next collection self._recursively_bump_finalization_state_from_2_to_3(x) else: new_with_finalizer.append(newx) + new_with_finalizer.append(fq_nr) self.tmpstack.delete() pending.delete() @@ -627,16 +641,6 @@ class SemiSpaceGC(MovingGCBase): self.objects_with_weakrefs.delete() self.objects_with_weakrefs = new_with_weakref - def update_run_finalizers(self): - # we are in an inner collection, caused by a finalizer - # the run_finalizers objects need to be copied - new_run_finalizer = self.AddressDeque() - while self.run_finalizers.non_empty(): - obj = self.run_finalizers.popleft() - new_run_finalizer.append(self.copy(obj)) - self.run_finalizers.delete() - self.run_finalizers = new_run_finalizer - def _is_external(self, obj): return (self.header(obj).tid & GCFLAG_EXTERNAL) != 0 diff --git a/rpython/memory/gc/test/test_direct.py b/rpython/memory/gc/test/test_direct.py index 64ba975aaf..cc4576601d 100644 --- a/rpython/memory/gc/test/test_direct.py +++ b/rpython/memory/gc/test/test_direct.py @@ -8,7 +8,7 @@ see as the list of roots (stack and prebuilt objects). 
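
All of this plumbing backs the user-facing rgc.FinalizerQueue protocol exercised by the new gc_test_base tests below: objects call fq.register_finalizer(self), the GC invokes finalizer_trigger() once some of them are found dead, and next_dead() pops them one by one, returning None when the queue is empty. A toy plain-Python model of just that calling convention, to make the protocol explicit (rgc itself and the real GC hooks are not used here; the _dead manipulation stands in for the collector):

from collections import deque

class ToyFinalizerQueue(object):
    """Models the rgc.FinalizerQueue calling convention only."""
    def __init__(self):
        self._dead = deque()
        self.finalized = []

    def register_finalizer(self, obj):
        pass                            # the real GC remembers (obj, queue index)

    def next_dead(self):
        return self._dead.popleft() if self._dead else None

    def finalizer_trigger(self):        # called by the GC after a collection
        while True:
            obj = self.next_dead()
            if obj is None:
                break
            self.finalized.append(obj)

fq = ToyFinalizerQueue()
fq.register_finalizer('x')
fq._dead.append('x')                    # stands in for the GC finding 'x' dead
fq.finalizer_trigger()
assert fq.finalized == ['x']
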
import py from rpython.rtyper.lltypesystem import lltype, llmemory -from rpython.memory.gctypelayout import TypeLayoutBuilder +from rpython.memory.gctypelayout import TypeLayoutBuilder, FIN_HANDLER_ARRAY from rpython.rlib.rarithmetic import LONG_BIT, is_valid_int from rpython.memory.gc import minimark, incminimark from rpython.memory.gctypelayout import zero_gc_pointers_inside, zero_gc_pointers @@ -84,7 +84,9 @@ class BaseDirectGCTest(object): self.gc.set_root_walker(self.rootwalker) self.layoutbuilder = TypeLayoutBuilder(self.GCClass) self.get_type_id = self.layoutbuilder.get_type_id - self.layoutbuilder.initialize_gc_query_function(self.gc) + gcdata = self.layoutbuilder.initialize_gc_query_function(self.gc) + ll_handlers = lltype.malloc(FIN_HANDLER_ARRAY, 0, immortal=True) + gcdata.finalizer_handlers = llmemory.cast_ptr_to_adr(ll_handlers) self.gc.setup() def consider_constant(self, p): @@ -617,7 +619,7 @@ class TestIncrementalMiniMarkGCSimple(TestMiniMarkGCSimple): oldhdr = self.gc.header(llmemory.cast_ptr_to_adr(oldobj)) assert oldhdr.tid & incminimark.GCFLAG_VISITED == 0 - self.gc.minor_collection() + self.gc._minor_collection() self.gc.visit_all_objects_step(1) assert oldhdr.tid & incminimark.GCFLAG_VISITED @@ -628,7 +630,7 @@ class TestIncrementalMiniMarkGCSimple(TestMiniMarkGCSimple): assert self.gc.header(self.gc.old_objects_pointing_to_young.tolist()[0]) == oldhdr - self.gc.minor_collection() + self.gc._minor_collection() self.gc.debug_check_consistency() def test_sweeping_simple(self): diff --git a/rpython/memory/gc/test/test_rawrefcount.py b/rpython/memory/gc/test/test_rawrefcount.py index 508f9dc66a..120d4db7ee 100644 --- a/rpython/memory/gc/test/test_rawrefcount.py +++ b/rpython/memory/gc/test/test_rawrefcount.py @@ -22,7 +22,7 @@ class TestRawRefCount(BaseDirectGCTest): if major: self.gc.collect() else: - self.gc.minor_collection() + self.gc._minor_collection() count1 = len(self.trigger) self.gc.rrc_invoke_callback() count2 = len(self.trigger) diff --git a/rpython/memory/gctransform/boehm.py b/rpython/memory/gctransform/boehm.py index 25f72d32f6..fde0b676f8 100644 --- a/rpython/memory/gctransform/boehm.py +++ b/rpython/memory/gctransform/boehm.py @@ -46,11 +46,12 @@ class BoehmGCTransformer(GCTransformer): ll_malloc_varsize_no_length, [lltype.Signed]*3, llmemory.Address, inline=False) self.malloc_varsize_ptr = self.inittime_helper( ll_malloc_varsize, [lltype.Signed]*4, llmemory.Address, inline=False) - self.weakref_create_ptr = self.inittime_helper( - ll_weakref_create, [llmemory.Address], llmemory.WeakRefPtr, - inline=False) - self.weakref_deref_ptr = self.inittime_helper( - ll_weakref_deref, [llmemory.WeakRefPtr], llmemory.Address) + if self.translator.config.translation.rweakref: + self.weakref_create_ptr = self.inittime_helper( + ll_weakref_create, [llmemory.Address], llmemory.WeakRefPtr, + inline=False) + self.weakref_deref_ptr = self.inittime_helper( + ll_weakref_deref, [llmemory.WeakRefPtr], llmemory.Address) self.identityhash_ptr = self.inittime_helper( ll_identityhash, [llmemory.Address], lltype.Signed, inline=False) diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py index 8a7c0f06ad..d5302a8989 100644 --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -9,8 +9,10 @@ from rpython.rtyper.lltypesystem.lloperation import LL_OPERATIONS, llop from rpython.memory import gctypelayout from rpython.memory.gctransform.log import log from rpython.memory.gctransform.support import get_rtti, 
ll_call_destructor +from rpython.memory.gctransform.support import ll_report_finalizer_error from rpython.memory.gctransform.transform import GCTransformer from rpython.memory.gctypelayout import ll_weakref_deref, WEAKREF, WEAKREFPTR +from rpython.memory.gctypelayout import FIN_TRIGGER_FUNC, FIN_HANDLER_ARRAY from rpython.tool.sourcetools import func_with_new_name from rpython.translator.backendopt import graphanalyze from rpython.translator.backendopt.finalizer import FinalizerAnalyzer @@ -181,8 +183,11 @@ class BaseFrameworkGCTransformer(GCTransformer): gcdata.max_type_id = 13 # patched in finish() gcdata.typeids_z = a_random_address # patched in finish() gcdata.typeids_list = a_random_address # patched in finish() + gcdata.finalizer_handlers = a_random_address # patched in finish() self.gcdata = gcdata self.malloc_fnptr_cache = {} + self.finalizer_queue_indexes = {} + self.finalizer_handlers = [] gcdata.gc = GCClass(translator.config.translation, **GC_PARAMS) root_walker = self.build_root_walker() @@ -217,6 +222,7 @@ class BaseFrameworkGCTransformer(GCTransformer): data_classdef.generalize_attr('max_type_id', annmodel.SomeInteger()) data_classdef.generalize_attr('typeids_z', SomeAddress()) data_classdef.generalize_attr('typeids_list', SomeAddress()) + data_classdef.generalize_attr('finalizer_handlers', SomeAddress()) annhelper = annlowlevel.MixLevelHelperAnnotator(self.translator.rtyper) @@ -236,8 +242,9 @@ class BaseFrameworkGCTransformer(GCTransformer): annmodel.s_None) self.annotate_walker_functions(getfn) - self.weakref_deref_ptr = self.inittime_helper( - ll_weakref_deref, [llmemory.WeakRefPtr], llmemory.Address) + if translator.config.translation.rweakref: + self.weakref_deref_ptr = self.inittime_helper( + ll_weakref_deref, [llmemory.WeakRefPtr], llmemory.Address) classdef = bk.getuniqueclassdef(GCClass) s_gc = annmodel.SomeInstance(classdef) @@ -554,6 +561,12 @@ class BaseFrameworkGCTransformer(GCTransformer): [s_gc, s_typeid16], s_gcref) + self.register_finalizer_ptr = getfn(GCClass.register_finalizer, + [s_gc, + annmodel.SomeInteger(), + s_gcref], + annmodel.s_None) + def create_custom_trace_funcs(self, gc, rtyper): custom_trace_funcs = tuple(rtyper.custom_trace_funcs) rtyper.custom_trace_funcs = custom_trace_funcs @@ -680,6 +693,16 @@ class BaseFrameworkGCTransformer(GCTransformer): ll_instance.inst_typeids_list= llmemory.cast_ptr_to_adr(ll_typeids_list) newgcdependencies.append(ll_typeids_list) # + handlers = self.finalizer_handlers + ll_handlers = lltype.malloc(FIN_HANDLER_ARRAY, len(handlers), + immortal=True) + for i in range(len(handlers)): + ll_handlers[i].deque = handlers[i][0] + ll_handlers[i].trigger = handlers[i][1] + ll_instance.inst_finalizer_handlers = llmemory.cast_ptr_to_adr( + ll_handlers) + newgcdependencies.append(ll_handlers) + # return newgcdependencies def get_finish_tables(self): @@ -771,10 +794,8 @@ class BaseFrameworkGCTransformer(GCTransformer): info = self.layoutbuilder.get_info(type_id) c_size = rmodel.inputconst(lltype.Signed, info.fixedsize) fptrs = self.special_funcptr_for_type(TYPE) - has_finalizer = "finalizer" in fptrs - has_light_finalizer = "light_finalizer" in fptrs - if has_light_finalizer: - has_finalizer = True + has_finalizer = "destructor" in fptrs or "old_style_finalizer" in fptrs + has_light_finalizer = "destructor" in fptrs c_has_finalizer = rmodel.inputconst(lltype.Bool, has_finalizer) c_has_light_finalizer = rmodel.inputconst(lltype.Bool, has_light_finalizer) @@ -1497,6 +1518,60 @@ class BaseFrameworkGCTransformer(GCTransformer): 
return None return getattr(obj, '_hash_cache_', None) + def get_finalizer_queue_index(self, hop): + fq_tag = hop.spaceop.args[0].value + assert 'FinalizerQueue TAG' in fq_tag.expr + fq = fq_tag.default + try: + index = self.finalizer_queue_indexes[fq] + except KeyError: + index = len(self.finalizer_queue_indexes) + assert index == len(self.finalizer_handlers) + deque = self.gcdata.gc.AddressDeque() + # + def ll_finalizer_trigger(): + try: + fq.finalizer_trigger() + except Exception as e: + ll_report_finalizer_error(e) + ll_trigger = self.annotate_finalizer(ll_finalizer_trigger, [], + lltype.Void) + def ll_next_dead(): + if deque.non_empty(): + return deque.popleft() + else: + return llmemory.NULL + ll_next_dead = self.annotate_finalizer(ll_next_dead, [], + llmemory.Address) + c_ll_next_dead = rmodel.inputconst(lltype.typeOf(ll_next_dead), + ll_next_dead) + # + s_deque = self.translator.annotator.bookkeeper.immutablevalue(deque) + r_deque = self.translator.rtyper.getrepr(s_deque) + ll_deque = r_deque.convert_const(deque) + adr_deque = llmemory.cast_ptr_to_adr(ll_deque) + # + self.finalizer_handlers.append((adr_deque, ll_trigger, + c_ll_next_dead)) + self.finalizer_queue_indexes[fq] = index + return index + + def gct_gc_fq_register(self, hop): + index = self.get_finalizer_queue_index(hop) + c_index = rmodel.inputconst(lltype.Signed, index) + v_ptr = hop.spaceop.args[1] + v_ptr = hop.genop("cast_opaque_ptr", [v_ptr], + resulttype=llmemory.GCREF) + hop.genop("direct_call", [self.register_finalizer_ptr, self.c_const_gc, + c_index, v_ptr]) + + def gct_gc_fq_next_dead(self, hop): + index = self.get_finalizer_queue_index(hop) + c_ll_next_dead = self.finalizer_handlers[index][2] + v_adr = hop.genop("direct_call", [c_ll_next_dead], + resulttype=llmemory.Address) + hop.genop("cast_adr_to_ptr", [v_adr], + resultvar = hop.spaceop.result) class TransformerLayoutBuilder(gctypelayout.TypeLayoutBuilder): @@ -1512,22 +1587,18 @@ class TransformerLayoutBuilder(gctypelayout.TypeLayoutBuilder): self.translator = translator super(TransformerLayoutBuilder, self).__init__(GCClass, lltype2vtable) - def has_finalizer(self, TYPE): + def has_destructor(self, TYPE): rtti = get_rtti(TYPE) return rtti is not None and getattr(rtti._obj, 'destructor_funcptr', None) - def has_light_finalizer(self, TYPE): - fptrs = self.special_funcptr_for_type(TYPE) - return "light_finalizer" in fptrs - def has_custom_trace(self, TYPE): rtti = get_rtti(TYPE) return rtti is not None and getattr(rtti._obj, 'custom_trace_funcptr', None) - def make_finalizer_funcptr_for_type(self, TYPE): - if not self.has_finalizer(TYPE): + def make_destructor_funcptr_for_type(self, TYPE): + if not self.has_destructor(TYPE): return None, False rtti = get_rtti(TYPE) destrptr = rtti._obj.destructor_funcptr diff --git a/rpython/memory/gctransform/support.py b/rpython/memory/gctransform/support.py index 42c35f1b08..0edf8d5a40 100644 --- a/rpython/memory/gctransform/support.py +++ b/rpython/memory/gctransform/support.py @@ -80,7 +80,7 @@ def write(fd, string): def ll_call_destructor(destrptr, destr_v, typename): try: destrptr(destr_v) - except Exception, e: + except Exception as e: try: write(2, "a destructor of type ") write(2, typename) @@ -89,3 +89,11 @@ def ll_call_destructor(destrptr, destr_v, typename): write(2, " ignoring it\n") except: pass + +def ll_report_finalizer_error(e): + try: + write(2, "triggering finalizers raised an exception ") + write(2, str(e)) + write(2, " ignoring it\n") + except: + pass diff --git a/rpython/memory/gctransform/transform.py 
b/rpython/memory/gctransform/transform.py index 9fcc994075..8453fefbd0 100644 --- a/rpython/memory/gctransform/transform.py +++ b/rpython/memory/gctransform/transform.py @@ -129,7 +129,7 @@ class BaseGCTransformer(object): raise_analyzer, cleanup=False) must_constfold = True - except inline.CannotInline, e: + except inline.CannotInline as e: print 'CANNOT INLINE:', e print '\t%s into %s' % (inline_graph, graph) cleanup_graph(graph) diff --git a/rpython/memory/gctypelayout.py b/rpython/memory/gctypelayout.py index 735d1cf32e..3d4d6037e8 100644 --- a/rpython/memory/gctypelayout.py +++ b/rpython/memory/gctypelayout.py @@ -17,16 +17,17 @@ class GCData(object): OFFSETS_TO_GC_PTR = lltype.Array(lltype.Signed) - # A custom tracer (CT), enumerates the addresses that contain GCREFs. - # It is called with the object as first argument, and the previous - # returned address (or NULL the first time) as the second argument. - FINALIZER_FUNC = lltype.FuncType([llmemory.Address], lltype.Void) - FINALIZER = lltype.Ptr(FINALIZER_FUNC) + # A CUSTOM_FUNC is either a destructor, or a custom tracer. + # A destructor is called when the object is about to be freed. + # A custom tracer (CT) enumerates the addresses that contain GCREFs. + # Both are called with the address of the object as only argument. + CUSTOM_FUNC = lltype.FuncType([llmemory.Address], lltype.Void) + CUSTOM_FUNC_PTR = lltype.Ptr(CUSTOM_FUNC) # structure describing the layout of a typeid TYPE_INFO = lltype.Struct("type_info", ("infobits", lltype.Signed), # combination of the T_xxx consts - ("finalizer", FINALIZER), + ("customfunc", CUSTOM_FUNC_PTR), ("fixedsize", lltype.Signed), ("ofstoptrs", lltype.Ptr(OFFSETS_TO_GC_PTR)), hints={'immutable': True}, @@ -80,16 +81,18 @@ class GCData(object): def q_cannot_pin(self, typeid): typeinfo = self.get(typeid) ANY = (T_HAS_GCPTR | T_IS_WEAKREF) - return (typeinfo.infobits & ANY) != 0 or bool(typeinfo.finalizer) + return (typeinfo.infobits & ANY) != 0 or bool(typeinfo.customfunc) - def q_finalizer(self, typeid): - return self.get(typeid).finalizer + def q_finalizer_handlers(self): + adr = self.finalizer_handlers # set from framework.py or gcwrapper.py + return llmemory.cast_adr_to_ptr(adr, lltype.Ptr(FIN_HANDLER_ARRAY)) - def q_light_finalizer(self, typeid): + def q_destructor_or_custom_trace(self, typeid): + return self.get(typeid).customfunc + + def q_is_old_style_finalizer(self, typeid): typeinfo = self.get(typeid) - if typeinfo.infobits & T_HAS_LIGHTWEIGHT_FINALIZER: - return typeinfo.finalizer - return lltype.nullptr(GCData.FINALIZER_FUNC) + return (typeinfo.infobits & T_HAS_OLDSTYLE_FINALIZER) != 0 def q_offsets_to_gc_pointers(self, typeid): return self.get(typeid).ofstoptrs @@ -141,8 +144,9 @@ class GCData(object): self.q_is_varsize, self.q_has_gcptr_in_varsize, self.q_is_gcarrayofgcptr, - self.q_finalizer, - self.q_light_finalizer, + self.q_finalizer_handlers, + self.q_destructor_or_custom_trace, + self.q_is_old_style_finalizer, self.q_offsets_to_gc_pointers, self.q_fixed_size, self.q_varsize_item_sizes, @@ -170,7 +174,7 @@ T_IS_GCARRAY_OF_GCPTR = 0x040000 T_IS_WEAKREF = 0x080000 T_IS_RPYTHON_INSTANCE = 0x100000 # the type is a subclass of OBJECT T_HAS_CUSTOM_TRACE = 0x200000 -T_HAS_LIGHTWEIGHT_FINALIZER = 0x400000 +T_HAS_OLDSTYLE_FINALIZER = 0x400000 T_HAS_GCPTR = 0x1000000 T_KEY_MASK = intmask(0xFE000000) # bug detection only T_KEY_VALUE = intmask(0x5A000000) # bug detection only @@ -199,11 +203,11 @@ def encode_type_shape(builder, info, TYPE, index): # fptrs = 
builder.special_funcptr_for_type(TYPE) if fptrs: - if "finalizer" in fptrs: - info.finalizer = fptrs["finalizer"] - if "light_finalizer" in fptrs: - info.finalizer = fptrs["light_finalizer"] - infobits |= T_HAS_LIGHTWEIGHT_FINALIZER + if "destructor" in fptrs: + info.customfunc = fptrs["destructor"] + if "old_style_finalizer" in fptrs: + info.customfunc = fptrs["old_style_finalizer"] + infobits |= T_HAS_OLDSTYLE_FINALIZER # if not TYPE._is_varsize(): info.fixedsize = llarena.round_up_for_allocation( @@ -373,21 +377,21 @@ class TypeLayoutBuilder(object): def special_funcptr_for_type(self, TYPE): if TYPE in self._special_funcptrs: return self._special_funcptrs[TYPE] - fptr1, is_lightweight = self.make_finalizer_funcptr_for_type(TYPE) + fptr1, is_lightweight = self.make_destructor_funcptr_for_type(TYPE) fptr2 = self.make_custom_trace_funcptr_for_type(TYPE) result = {} if fptr1: if is_lightweight: - result["light_finalizer"] = fptr1 + result["destructor"] = fptr1 else: - result["finalizer"] = fptr1 + result["old_style_finalizer"] = fptr1 if fptr2: result["custom_trace"] = fptr2 self._special_funcptrs[TYPE] = result return result - def make_finalizer_funcptr_for_type(self, TYPE): - # must be overridden for proper finalizer support + def make_destructor_funcptr_for_type(self, TYPE): + # must be overridden for proper destructor support return None, False def make_custom_trace_funcptr_for_type(self, TYPE): @@ -546,3 +550,9 @@ def convert_weakref_to(targetptr): link = lltype.malloc(WEAKREF, immortal=True) link.weakptr = llmemory.cast_ptr_to_adr(targetptr) return link + +########## finalizers ########## + +FIN_TRIGGER_FUNC = lltype.FuncType([], lltype.Void) +FIN_HANDLER_ARRAY = lltype.Array(('deque', llmemory.Address), + ('trigger', lltype.Ptr(FIN_TRIGGER_FUNC))) diff --git a/rpython/memory/gcwrapper.py b/rpython/memory/gcwrapper.py index 688c07de5c..9e98627ed9 100644 --- a/rpython/memory/gcwrapper.py +++ b/rpython/memory/gcwrapper.py @@ -1,7 +1,7 @@ from rpython.translator.backendopt.finalizer import FinalizerAnalyzer from rpython.rtyper.lltypesystem import lltype, llmemory, llheap -from rpython.rtyper import llinterp -from rpython.rtyper.annlowlevel import llhelper +from rpython.rtyper import llinterp, rclass +from rpython.rtyper.annlowlevel import llhelper, cast_nongc_instance_to_adr from rpython.memory import gctypelayout from rpython.flowspace.model import Constant @@ -15,6 +15,7 @@ class GCManagedHeap(object): chunk_size = 10, translated_to_c = False, **GC_PARAMS) + self.translator = translator self.gc.set_root_walker(LLInterpRootWalker(self)) self.gc.DEBUG = True self.llinterp = llinterp @@ -30,6 +31,11 @@ class GCManagedHeap(object): self.llinterp) self.get_type_id = layoutbuilder.get_type_id gcdata = layoutbuilder.initialize_gc_query_function(self.gc) + self.gcdata = gcdata + + self.finalizer_queue_indexes = {} + self.finalizer_handlers = [] + self.update_finalizer_handlers() constants = collect_constants(flowgraphs) for obj in constants: @@ -187,6 +193,55 @@ class GCManagedHeap(object): def thread_run(self): pass + def _get_finalizer_trigger(self, fq): + graph = self.translator._graphof(fq.finalizer_trigger.im_func) + def ll_trigger(): + try: + self.llinterp.eval_graph(graph, [None], recursive=True) + except llinterp.LLException: + raise RuntimeError( + "finalizer_trigger() raised an exception, shouldn't happen") + return ll_trigger + + def update_finalizer_handlers(self): + handlers = self.finalizer_handlers + ll_handlers = lltype.malloc(gctypelayout.FIN_HANDLER_ARRAY, + len(handlers), 
immortal=True) + for i in range(len(handlers)): + fq, deque = handlers[i] + ll_handlers[i].deque = cast_nongc_instance_to_adr(deque) + ll_handlers[i].trigger = llhelper( + lltype.Ptr(gctypelayout.FIN_TRIGGER_FUNC), + self._get_finalizer_trigger(fq)) + self.gcdata.finalizer_handlers = llmemory.cast_ptr_to_adr(ll_handlers) + + def get_finalizer_queue_index(self, fq_tag): + assert 'FinalizerQueue TAG' in fq_tag.expr + fq = fq_tag.default + try: + index = self.finalizer_queue_indexes[fq] + except KeyError: + index = len(self.finalizer_handlers) + self.finalizer_queue_indexes[fq] = index + deque = self.gc.AddressDeque() + self.finalizer_handlers.append((fq, deque)) + self.update_finalizer_handlers() + return index + + def gc_fq_next_dead(self, fq_tag): + index = self.get_finalizer_queue_index(fq_tag) + deque = self.finalizer_handlers[index][1] + if deque.non_empty(): + obj = deque.popleft() + else: + obj = llmemory.NULL + return llmemory.cast_adr_to_ptr(obj, rclass.OBJECTPTR) + + def gc_fq_register(self, fq_tag, ptr): + index = self.get_finalizer_queue_index(fq_tag) + ptr = lltype.cast_opaque_ptr(llmemory.GCREF, ptr) + self.gc.register_finalizer(index, ptr) + # ____________________________________________________________ class LLInterpRootWalker: @@ -228,7 +283,7 @@ class DirectRunLayoutBuilder(gctypelayout.TypeLayoutBuilder): self.llinterp = llinterp super(DirectRunLayoutBuilder, self).__init__(GCClass, lltype2vtable) - def make_finalizer_funcptr_for_type(self, TYPE): + def make_destructor_funcptr_for_type(self, TYPE): from rpython.memory.gctransform.support import get_rtti rtti = get_rtti(TYPE) if rtti is not None and hasattr(rtti._obj, 'destructor_funcptr'): @@ -239,15 +294,17 @@ class DirectRunLayoutBuilder(gctypelayout.TypeLayoutBuilder): return None, False t = self.llinterp.typer.annotator.translator - light = not FinalizerAnalyzer(t).analyze_light_finalizer(destrgraph) - def ll_finalizer(addr): + is_light = not FinalizerAnalyzer(t).analyze_light_finalizer(destrgraph) + + def ll_destructor(addr): try: v = llmemory.cast_adr_to_ptr(addr, DESTR_ARG) self.llinterp.eval_graph(destrgraph, [v], recursive=True) except llinterp.LLException: raise RuntimeError( - "a finalizer raised an exception, shouldn't happen") - return llhelper(gctypelayout.GCData.FINALIZER, ll_finalizer), light + "a destructor raised an exception, shouldn't happen") + return (llhelper(gctypelayout.GCData.CUSTOM_FUNC_PTR, ll_destructor), + is_light) def make_custom_trace_funcptr_for_type(self, TYPE): from rpython.memory.gctransform.support import get_rtti diff --git a/rpython/memory/support.py b/rpython/memory/support.py index 326a08ae41..799b08f0e5 100644 --- a/rpython/memory/support.py +++ b/rpython/memory/support.py @@ -2,6 +2,9 @@ from rpython.rtyper.lltypesystem import lltype, llmemory from rpython.rlib.objectmodel import free_non_gc_object, we_are_translated from rpython.rlib.debug import ll_assert from rpython.tool.identity_dict import identity_dict +from rpython.rtyper.rclass import NONGCOBJECTPTR +from rpython.rtyper.annlowlevel import cast_nongc_instance_to_base_ptr +from rpython.rtyper.annlowlevel import cast_base_ptr_to_nongc_instance def mangle_hash(i): @@ -292,6 +295,9 @@ def get_address_deque(chunk_size=DEFAULT_CHUNK_SIZE, cache={}): cur = next free_non_gc_object(self) + def _was_freed(self): + return False # otherwise, the __class__ changes + cache[chunk_size] = AddressDeque return AddressDeque diff --git a/rpython/memory/test/gc_test_base.py b/rpython/memory/test/gc_test_base.py index d3541b48bf..4d3ea09409 
100644 --- a/rpython/memory/test/gc_test_base.py +++ b/rpython/memory/test/gc_test_base.py @@ -128,7 +128,7 @@ class GCTest(object): assert res == concat(100) #assert simulator.current_size - curr < 16000 * INT_SIZE / 4 - def test_finalizer(self): + def test_destructor(self): class B(object): pass b = B() @@ -152,7 +152,7 @@ class GCTest(object): res = self.interpret(f, [5]) assert res == 6 - def test_finalizer_calls_malloc(self): + def test_old_style_finalizer(self): class B(object): pass b = B() @@ -163,17 +163,160 @@ class GCTest(object): self.id = b.nextid b.nextid += 1 def __del__(self): + llop.gc__collect(lltype.Void) b.num_deleted += 1 - C() + def f(x): + a = A() + i = 0 + while i < x: + i += 1 + a = A() + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + return b.num_deleted + res = self.interpret(f, [5]) + assert res == 6 + + def test_finalizer(self): + class B(object): + pass + b = B() + b.nextid = 0 + b.num_deleted = 0 + class A(object): + def __init__(self): + self.id = b.nextid + b.nextid += 1 + fq.register_finalizer(self) + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while self.next_dead() is not None: + b.num_deleted += 1 + fq = FQ() + def f(x): + a = A() + i = 0 + while i < x: + i += 1 + a = A() + a = None + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + return b.num_deleted + res = self.interpret(f, [5]) + assert res == 6 + + def test_finalizer_delaying_next_dead(self): + class B(object): + pass + b = B() + b.nextid = 0 + class A(object): + def __init__(self): + self.id = b.nextid + b.nextid += 1 + fq.register_finalizer(self) + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + b.triggered += 1 + fq = FQ() + def g(): # indirection to avoid leaking the result for too long + A() + def f(x): + b.triggered = 0 + g() + i = 0 + while i < x: + i += 1 + g() + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + assert b.triggered > 0 + g(); g() # two more + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + num_deleted = 0 + while fq.next_dead() is not None: + num_deleted += 1 + return num_deleted + 1000 * b.triggered + res = self.interpret(f, [5]) + assert res in (3008, 4008, 5008), "res == %d" % (res,) + + def test_finalizer_two_queues_in_sequence(self): + class B(object): + pass + b = B() + b.nextid = 0 + b.num_deleted_1 = 0 + b.num_deleted_2 = 0 + class A(object): + def __init__(self): + self.id = b.nextid + b.nextid += 1 + fq1.register_finalizer(self) + class FQ1(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while True: + a = self.next_dead() + if a is None: + break + b.num_deleted_1 += 1 + fq2.register_finalizer(a) + class FQ2(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while self.next_dead() is not None: + b.num_deleted_2 += 1 + fq1 = FQ1() + fq2 = FQ2() + def f(x): + A() + i = 0 + while i < x: + i += 1 + A() + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + return b.num_deleted_1 + b.num_deleted_2 * 1000 + res = self.interpret(f, [5]) + assert res == 6006 + + def test_finalizer_calls_malloc(self): + class B(object): + pass + b = B() + b.nextid = 0 + b.num_deleted = 0 + class A(object): + def __init__(self): + self.id = b.nextid + b.nextid += 1 + fq.register_finalizer(self) class C(A): - def __del__(self): - b.num_deleted += 1 + pass + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while True: + a = self.next_dead() + 
if a is None: + break + b.num_deleted += 1 + if not isinstance(a, C): + C() + fq = FQ() def f(x): a = A() i = 0 while i < x: i += 1 a = A() + a = None llop.gc__collect(lltype.Void) llop.gc__collect(lltype.Void) return b.num_deleted @@ -190,15 +333,21 @@ class GCTest(object): def __init__(self): self.id = b.nextid b.nextid += 1 - def __del__(self): - b.num_deleted += 1 - llop.gc__collect(lltype.Void) + fq.register_finalizer(self) + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while self.next_dead() is not None: + b.num_deleted += 1 + llop.gc__collect(lltype.Void) + fq = FQ() def f(x): a = A() i = 0 while i < x: i += 1 a = A() + a = None llop.gc__collect(lltype.Void) llop.gc__collect(lltype.Void) return b.num_deleted @@ -215,20 +364,29 @@ class GCTest(object): def __init__(self): self.id = b.nextid b.nextid += 1 - def __del__(self): - b.num_deleted += 1 - b.a = self + fq.register_finalizer(self) + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while True: + a = self.next_dead() + if a is None: + break + b.num_deleted += 1 + b.a = a + fq = FQ() def f(x): a = A() i = 0 while i < x: i += 1 a = A() + a = None llop.gc__collect(lltype.Void) llop.gc__collect(lltype.Void) aid = b.a.id b.a = None - # check that __del__ is not called again + # check that finalizer_trigger() is not called again llop.gc__collect(lltype.Void) llop.gc__collect(lltype.Void) return b.num_deleted * 10 + aid + 100 * (b.a is None) @@ -290,7 +448,7 @@ class GCTest(object): res = self.interpret(f, []) assert res - def test_weakref_to_object_with_finalizer(self): + def test_weakref_to_object_with_destructor(self): import weakref class A(object): count = 0 @@ -310,6 +468,32 @@ class GCTest(object): res = self.interpret(f, []) assert res + def test_weakref_to_object_with_finalizer(self): + import weakref + class A(object): + count = 0 + a = A() + class B(object): + pass + class FQ(rgc.FinalizerQueue): + Class = B + def finalizer_trigger(self): + while self.next_dead() is not None: + a.count += 1 + fq = FQ() + def g(): + b = B() + fq.register_finalizer(b) + return weakref.ref(b) + def f(): + ref = g() + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + result = a.count == 1 and (ref() is None) + return result + res = self.interpret(f, []) + assert res + def test_bug_1(self): import weakref class B(object): @@ -329,23 +513,32 @@ class GCTest(object): res = self.interpret(f, []) assert res - def test_cycle_with_weakref_and_del(self): + def test_cycle_with_weakref_and_finalizer(self): import weakref class A(object): count = 0 a = A() class B(object): - def __del__(self): - # when __del__ is called, the weakref to c should be dead - if self.ref() is None: - a.count += 10 # ok - else: - a.count = 666 # not ok + pass + class FQ(rgc.FinalizerQueue): + Class = B + def finalizer_trigger(self): + while True: + b = self.next_dead() + if b is None: + break + # when we are here, the weakref to c should be dead + if b.ref() is None: + a.count += 10 # ok + else: + a.count = 666 # not ok + fq = FQ() class C(object): pass def g(): c = C() c.b = B() + fq.register_finalizer(c.b) ref = weakref.ref(c) c.b.ref = ref return ref @@ -365,23 +558,32 @@ class GCTest(object): a = A() expected_invalid = self.WREF_IS_INVALID_BEFORE_DEL_IS_CALLED class B(object): - def __del__(self): - # when __del__ is called, the weakref to myself is still valid + pass + class FQ(rgc.FinalizerQueue): + Class = B + def finalizer_trigger(self): + # when we are here, the weakref to myself is still valid # in 
RPython with most GCs. However, this can lead to strange # bugs with incminimark. https://bugs.pypy.org/issue1687 # So with incminimark, we expect the opposite. - if expected_invalid: - if self.ref() is None: - a.count += 10 # ok - else: - a.count = 666 # not ok - else: - if self.ref() is self: - a.count += 10 # ok + while True: + b = self.next_dead() + if b is None: + break + if expected_invalid: + if b.ref() is None: + a.count += 10 # ok + else: + a.count = 666 # not ok else: - a.count = 666 # not ok + if b.ref() is b: + a.count += 10 # ok + else: + a.count = 666 # not ok + fq = FQ() def g(): b = B() + fq.register_finalizer(b) ref = weakref.ref(b) b.ref = ref return ref @@ -399,10 +601,19 @@ class GCTest(object): class A(object): pass class B(object): - def __del__(self): - self.wref().x += 1 + pass + class FQ(rgc.FinalizerQueue): + Class = B + def finalizer_trigger(self): + while True: + b = self.next_dead() + if b is None: + break + b.wref().x += 1 + fq = FQ() def g(a): b = B() + fq.register_finalizer(b) b.wref = weakref.ref(a) # the only way to reach this weakref is via B, which is an # object with finalizer (but the weakref itself points to @@ -448,9 +659,14 @@ class GCTest(object): def __init__(self): self.id = b.nextid b.nextid += 1 - def __del__(self): - b.num_deleted += 1 - b.all.append(D(b.num_deleted)) + fq.register_finalizer(self) + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while self.next_dead() is not None: + b.num_deleted += 1 + b.all.append(D(b.num_deleted)) + fq = FQ() class D(object): # make a big object that does not use malloc_varsize def __init__(self, x): @@ -461,6 +677,7 @@ class GCTest(object): i = 0 all = [None] * x a = A() + del a while i < x: d = D(i) all[i] = d @@ -481,15 +698,24 @@ class GCTest(object): def __init__(self): self.id = b.nextid b.nextid += 1 - def __del__(self): - llop.gc__collect(lltype.Void) - b.num_deleted += 1 - C() - C() + fq.register_finalizer(self) class C(A): - def __del__(self): - b.num_deleted += 1 - b.num_deleted_c += 1 + pass + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while True: + a = self.next_dead() + if a is None: + break + llop.gc__collect(lltype.Void) + b.num_deleted += 1 + if isinstance(a, C): + b.num_deleted_c += 1 + else: + C() + C() + fq = FQ() def f(x, y): persistent_a1 = A() persistent_a2 = A() diff --git a/rpython/memory/test/snippet.py b/rpython/memory/test/snippet.py index b6f5f4dd5c..eefe2f2ef3 100644 --- a/rpython/memory/test/snippet.py +++ b/rpython/memory/test/snippet.py @@ -1,5 +1,6 @@ import os, py from rpython.tool.udir import udir +from rpython.rlib import rgc from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem.lloperation import llop @@ -52,7 +53,7 @@ class SemiSpaceGCTestDefines: def set_age_of(c, newvalue): # NB. this used to be a dictionary, but setting into a dict # consumes memory. This has the effect that this test's - # __del__ methods can consume more memory and potentially + # finalizer_trigger method can consume more memory and potentially # cause another collection. This would result in objects # being unexpectedly destroyed at the same 'state.time'. 
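
On the implementation side, the gcwrapper.py hunk above wires each such queue into the low-level handler array added at the end of gctypelayout.py (FIN_TRIGGER_FUNC / FIN_HANDLER_ARRAY). A minimal sketch of that wiring, assuming the rpython packages are importable; build_finalizer_handlers and make_trigger are illustrative stand-ins for update_finalizer_handlers() and _get_finalizer_trigger() from the hunk:

    from rpython.rtyper.lltypesystem import lltype, llmemory
    from rpython.rtyper.annlowlevel import llhelper, cast_nongc_instance_to_adr
    from rpython.memory import gctypelayout

    def build_finalizer_handlers(handlers, make_trigger):
        # 'handlers' is the list of (FinalizerQueue, AddressDeque) pairs kept
        # by GCManagedHeap.finalizer_handlers; make_trigger(fq) must return a
        # zero-argument callable that runs fq.finalizer_trigger().
        ll_handlers = lltype.malloc(gctypelayout.FIN_HANDLER_ARRAY,
                                    len(handlers), immortal=True)
        for i in range(len(handlers)):
            fq, deque = handlers[i]
            # each entry pairs the queue's deque of dead objects with the
            # trigger the GC calls once something lands in that deque
            ll_handlers[i].deque = cast_nongc_instance_to_adr(deque)
            ll_handlers[i].trigger = llhelper(
                lltype.Ptr(gctypelayout.FIN_TRIGGER_FUNC), make_trigger(fq))
        # the GC reaches the array through gcdata.finalizer_handlers
        return llmemory.cast_ptr_to_adr(ll_handlers)
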
state.age[ord(c) - ord('a')] = newvalue @@ -61,12 +62,21 @@ class SemiSpaceGCTestDefines: def __init__(self, key): self.key = key self.refs = [] - def __del__(self): + fq.register_finalizer(self) + + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): from rpython.rlib.debug import debug_print - debug_print("DEL:", self.key) - assert age_of(self.key) == -1 - set_age_of(self.key, state.time) - state.progress = True + while True: + a = self.next_dead() + if a is None: + break + debug_print("DEL:", a.key) + assert age_of(a.key) == -1 + set_age_of(a.key, state.time) + state.progress = True + fq = FQ() def build_example(input): state.time = 0 @@ -150,11 +160,22 @@ class SemiSpaceGCTestDefines: class B: count = 0 class A: - def __del__(self): - self.b.count += 1 + pass + + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while True: + a = self.next_dead() + if a is None: + break + a.b.count += 1 + fq = FQ() + def g(): b = B() a = A() + fq.register_finalizer(a) a.b = b i = 0 lst = [None] diff --git a/rpython/memory/test/test_transformed_gc.py b/rpython/memory/test/test_transformed_gc.py index 8feeb9198b..c0c678b86a 100644 --- a/rpython/memory/test/test_transformed_gc.py +++ b/rpython/memory/test/test_transformed_gc.py @@ -293,7 +293,7 @@ class GenericGCTests(GCTest): res = run([]) assert res == 42 - def define_finalizer(cls): + def define_destructor(cls): class B(object): pass b = B() @@ -316,6 +316,68 @@ class GenericGCTests(GCTest): return b.num_deleted return f + def test_destructor(self): + run = self.runner("destructor") + res = run([5, 42]) #XXX pure lazyness here too + assert res == 6 + + def define_old_style_finalizer(cls): + class B(object): + pass + b = B() + b.nextid = 0 + b.num_deleted = 0 + class A(object): + def __init__(self): + self.id = b.nextid + b.nextid += 1 + def __del__(self): + llop.gc__collect(lltype.Void) + b.num_deleted += 1 + def f(x, y): + a = A() + i = 0 + while i < x: + i += 1 + a = A() + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + return b.num_deleted + return f + + def test_old_style_finalizer(self): + run = self.runner("old_style_finalizer") + res = run([5, 42]) #XXX pure lazyness here too + assert res == 6 + + def define_finalizer(cls): + class B(object): + pass + b = B() + b.nextid = 0 + b.num_deleted = 0 + class A(object): + def __init__(self): + self.id = b.nextid + b.nextid += 1 + fq.register_finalizer(self) + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while self.next_dead() is not None: + b.num_deleted += 1 + fq = FQ() + def f(x, y): + a = A() + i = 0 + while i < x: + i += 1 + a = A() + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + return b.num_deleted + return f + def test_finalizer(self): run = self.runner("finalizer") res = run([5, 42]) #XXX pure lazyness here too @@ -331,12 +393,20 @@ class GenericGCTests(GCTest): def __init__(self): self.id = b.nextid b.nextid += 1 - def __del__(self): - b.num_deleted += 1 - C() + fq.register_finalizer(self) class C(AAA): - def __del__(self): - b.num_deleted += 1 + pass + class FQ(rgc.FinalizerQueue): + Class = AAA + def finalizer_trigger(self): + while True: + a = self.next_dead() + if a is None: + break + b.num_deleted += 1 + if not isinstance(a, C): + C() + fq = FQ() def f(x, y): a = AAA() i = 0 @@ -363,9 +433,17 @@ class GenericGCTests(GCTest): def __init__(self): self.id = b.nextid b.nextid += 1 - def __del__(self): - b.num_deleted += 1 - b.a = self + fq.register_finalizer(self) + class 
FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while True: + a = self.next_dead() + if a is None: + break + b.num_deleted += 1 + b.a = a + fq = FQ() def f(x, y): a = A() i = 0 @@ -376,7 +454,7 @@ class GenericGCTests(GCTest): llop.gc__collect(lltype.Void) aid = b.a.id b.a = None - # check that __del__ is not called again + # check that finalizer_trigger() is not called again llop.gc__collect(lltype.Void) llop.gc__collect(lltype.Void) return b.num_deleted * 10 + aid + 100 * (b.a is None) @@ -440,7 +518,7 @@ class GenericGCTests(GCTest): res = run([]) assert res - def define_weakref_to_object_with_finalizer(cls): + def define_weakref_to_object_with_destructor(cls): import weakref, gc class A(object): count = 0 @@ -459,6 +537,36 @@ class GenericGCTests(GCTest): return result return f + def test_weakref_to_object_with_destructor(self): + run = self.runner("weakref_to_object_with_destructor") + res = run([]) + assert res + + def define_weakref_to_object_with_finalizer(cls): + import weakref, gc + class A(object): + count = 0 + a = A() + class B(object): + pass + class FQ(rgc.FinalizerQueue): + Class = B + def finalizer_trigger(self): + while self.next_dead() is not None: + a.count += 1 + fq = FQ() + def g(): + b = B() + fq.register_finalizer(b) + return weakref.ref(b) + def f(): + ref = g() + llop.gc__collect(lltype.Void) + llop.gc__collect(lltype.Void) + result = a.count == 1 and (ref() is None) + return result + return f + def test_weakref_to_object_with_finalizer(self): run = self.runner("weakref_to_object_with_finalizer") res = run([]) @@ -475,15 +583,24 @@ class GenericGCTests(GCTest): def __init__(self): self.id = b.nextid b.nextid += 1 - def __del__(self): - llop.gc__collect(lltype.Void) - b.num_deleted += 1 - C() - C() + fq.register_finalizer(self) class C(A): - def __del__(self): - b.num_deleted += 1 - b.num_deleted_c += 1 + pass + class FQ(rgc.FinalizerQueue): + Class = A + def finalizer_trigger(self): + while True: + a = self.next_dead() + if a is None: + break + llop.gc__collect(lltype.Void) + b.num_deleted += 1 + if isinstance(a, C): + b.num_deleted_c += 1 + else: + C() + C() + fq = FQ() def f(x, y): persistent_a1 = A() persistent_a2 = A() diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py index aa2a2d78c0..ca38f801d9 100644 --- a/rpython/rlib/objectmodel.py +++ b/rpython/rlib/objectmodel.py @@ -211,6 +211,12 @@ def always_inline(func): func._always_inline_ = True return func +def dont_inline(func): + """ mark the function as never-to-be-inlined by the RPython optimizations + (not the JIT!), no matter its size.""" + func._dont_inline_ = True + return func + # ____________________________________________________________ diff --git a/rpython/rlib/parsing/main.py b/rpython/rlib/parsing/main.py index 36c303fab6..6946e4ca42 100644 --- a/rpython/rlib/parsing/main.py +++ b/rpython/rlib/parsing/main.py @@ -7,7 +7,7 @@ def make_parser_from_file(filename): try: t = py.path.local(filename).read(mode='U') regexs, rules, ToAST = parse_ebnf(t) - except ParseError, e: + except ParseError as e: print e.nice_error_message(filename=filename, source=t) raise return make_parse_function(regexs, rules, eof=True) diff --git a/rpython/rlib/parsing/makepackrat.py b/rpython/rlib/parsing/makepackrat.py index b0b7f9ac78..5d357b61a2 100644 --- a/rpython/rlib/parsing/makepackrat.py +++ b/rpython/rlib/parsing/makepackrat.py @@ -632,7 +632,7 @@ class MetaPackratParser(type): p = PyPackratSyntaxParser(source) try: t = p.file() - except BacktrackException, exc: + except 
BacktrackException as exc: print exc.error.nice_error_message("<docstring>", source) lineno, _ = exc.error.get_line_column(source) errorline = source.split("\n")[lineno] diff --git a/rpython/rlib/parsing/pypackrat.py b/rpython/rlib/parsing/pypackrat.py index 2abb4bb531..c857889f26 100644 --- a/rpython/rlib/parsing/pypackrat.py +++ b/rpython/rlib/parsing/pypackrat.py @@ -29,7 +29,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -61,7 +61,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -93,7 +93,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -125,7 +125,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -167,14 +167,14 @@ class Parser(object): _result = _call_status.result _error = _call_status.error break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos try: _result = self._regex299149370() break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 raise BacktrackException(_error) @@ -197,7 +197,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -231,7 +231,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -265,7 +265,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -299,7 +299,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -360,7 +360,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -403,7 +403,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -433,7 +433,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ 
-480,7 +480,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -504,7 +504,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -551,7 +551,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -569,7 +569,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all2.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 break @@ -586,7 +586,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all4.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 break @@ -600,7 +600,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all6.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 break @@ -623,7 +623,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -670,7 +670,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all1.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 break @@ -691,7 +691,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all6.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 break @@ -705,14 +705,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all8.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice9 break _result = _all8 _result = _before_discard5 _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break @@ -730,7 +730,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all10.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice11 break @@ -744,21 +744,21 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, 
_call_status.error) _all12.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice13 break _result = _all12 _result = (Nonterminal('productionargs', args + [arg])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice14 = self._pos try: _result = (Nonterminal('productionargs', [])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice14 raise BacktrackException(_error) @@ -781,7 +781,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -833,7 +833,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break @@ -856,14 +856,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all7.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice8 break _result = _all7 _result = _before_discard6 _all1.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 break @@ -875,7 +875,7 @@ class Parser(object): last = _result _result = (Nonterminal('or', l + [last])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice9 = self._pos @@ -884,7 +884,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice9 raise BacktrackException(_error) @@ -909,7 +909,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -976,7 +976,7 @@ class Parser(object): _error = self._combine_errors(_error, _call_status.error) _result = _before_discard4 _all1.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 break @@ -984,7 +984,7 @@ class Parser(object): cmds = _result _result = (Nonterminal('commands', [cmd] + cmds)) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice5 = self._pos @@ -993,7 +993,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 raise BacktrackException(_error) @@ -1018,7 +1018,7 @@ class Parser(object): _status.result = 
_result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1073,7 +1073,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1115,7 +1115,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -1124,7 +1124,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 _choice2 = self._pos @@ -1133,7 +1133,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 _choice3 = self._pos @@ -1142,7 +1142,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 _choice4 = self._pos @@ -1151,7 +1151,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 _choice5 = self._pos @@ -1160,7 +1160,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 raise BacktrackException(_error) @@ -1185,7 +1185,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1229,7 +1229,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -1246,7 +1246,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all2.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 break @@ -1269,7 +1269,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1323,7 +1323,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all1.append(_result) - except 
BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 break @@ -1337,7 +1337,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break @@ -1354,14 +1354,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all5.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice6 break _result = _all5 _result = (Nonterminal('if', [cmd, condition])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice7 = self._pos @@ -1375,7 +1375,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all8.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice9 break @@ -1392,14 +1392,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all10.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice11 break _result = _all10 _result = (Nonterminal('if', [condition])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 raise BacktrackException(_error) @@ -1412,7 +1412,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all12.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice13 break @@ -1429,7 +1429,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all14.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice15 break @@ -1453,7 +1453,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1497,7 +1497,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -1514,7 +1514,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all2.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 break @@ -1528,7 +1528,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all4.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, 
_exc.error) self._pos = _choice5 break @@ -1545,7 +1545,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all6.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 break @@ -1572,7 +1572,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1619,7 +1619,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -1643,7 +1643,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1690,7 +1690,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -1704,7 +1704,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all2.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 break @@ -1731,7 +1731,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1781,7 +1781,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all1.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 break @@ -1795,14 +1795,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break _result = _all3 _result = (Nonterminal('maybe', [what])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice5 = self._pos @@ -1819,7 +1819,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all6.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 break @@ -1829,14 +1829,14 @@ class Parser(object): try: _result = self.__chars__('*') break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice8 _choice9 = self._pos try: _result = self.__chars__('+') break - except BacktrackException, _exc: + except BacktrackException as 
_exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice9 raise BacktrackException(_error) @@ -1851,14 +1851,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all10.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice11 break _result = _all10 _result = (Nonterminal('repetition', [repetition, what])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 raise BacktrackException(_error) @@ -1874,7 +1874,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all12.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice13 break @@ -1884,14 +1884,14 @@ class Parser(object): try: _result = self.__chars__('*') break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice14 _choice15 = self._pos try: _result = self.__chars__('+') break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice15 raise BacktrackException(_error) @@ -1906,7 +1906,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all16.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice17 break @@ -1930,7 +1930,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1977,7 +1977,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all1.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 break @@ -1994,14 +1994,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break _result = _all3 _result = (Nonterminal('negation', [what])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice5 = self._pos @@ -2010,7 +2010,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 raise BacktrackException(_error) @@ -2035,7 +2035,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -2082,7 +2082,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all1.append(_result) - except 
BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 break @@ -2099,7 +2099,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break @@ -2113,14 +2113,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all5.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice6 break _result = _all5 _result = (Nonterminal('exclusive', [what])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice7 = self._pos @@ -2134,7 +2134,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all8.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice9 break @@ -2151,7 +2151,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all10.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice11 break @@ -2165,14 +2165,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all12.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice13 break _result = _all12 _result = (Nonterminal('ignore', [what])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 _choice14 = self._pos @@ -2187,7 +2187,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all16.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice17 break @@ -2206,14 +2206,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all19.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice20 break _result = _all19 _result = _before_discard18 break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice14 _choice21 = self._pos @@ -2222,7 +2222,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice21 raise BacktrackException(_error) @@ -2247,7 +2247,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -2289,7 +2289,7 
@@ class Parser(object): _result = _call_status.result _error = _call_status.error break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -2306,14 +2306,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break _result = _all3 _result = _before_discard2 break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 _choice5 = self._pos @@ -2330,14 +2330,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all7.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice8 break _result = _all7 _result = _before_discard6 break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 raise BacktrackException(_error) @@ -2353,7 +2353,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all10.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice11 break @@ -2377,7 +2377,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -2428,7 +2428,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -2451,7 +2451,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -2498,7 +2498,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all1.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 break @@ -2519,7 +2519,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all6.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 break @@ -2533,14 +2533,14 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all8.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice9 break _result = _all8 _result = _before_discard5 _all3.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 break @@ -2559,21 
+2559,21 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all10.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice11 break _result = _all10 _result = (Nonterminal("args", args + [last])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice12 = self._pos try: _result = (Nonterminal("args", [])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice12 raise BacktrackException(_error) @@ -2596,7 +2596,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) diff --git a/rpython/rlib/parsing/regexparse.py b/rpython/rlib/parsing/regexparse.py index b391a96b50..e352b30b8f 100644 --- a/rpython/rlib/parsing/regexparse.py +++ b/rpython/rlib/parsing/regexparse.py @@ -299,7 +299,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -359,7 +359,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -408,7 +408,7 @@ class Parser(object): r2 = _result _result = (r1 | r2) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -417,7 +417,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 raise BacktrackException(_error) @@ -442,7 +442,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -485,7 +485,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all0.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 break @@ -509,7 +509,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -554,7 +554,7 @@ class Parser(object): _result = self.__chars__('*') _result = (r1.kleene()) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -566,7 +566,7 @@ class Parser(object): _result = self.__chars__('+') _result = (r1 + r1.kleene()) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = 
self._combine_errors(_error, _exc.error) self._pos = _choice1 _choice2 = self._pos @@ -578,7 +578,7 @@ class Parser(object): _result = self.__chars__('?') _result = (regex.StringExpression("") | r1) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 _choice3 = self._pos @@ -595,7 +595,7 @@ class Parser(object): _result = self.__chars__('}') _result = (r1 * n + r1.kleene()) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 _choice4 = self._pos @@ -612,7 +612,7 @@ class Parser(object): _result = self.__chars__('}') _result = (r1 * n[0] + reduce(operator.or_, [r1 * i for i in range(n[1] - n[0] + 1)], regex.StringExpression(""))) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 _choice5 = self._pos @@ -620,7 +620,7 @@ class Parser(object): _result = self.__chars__('{') _result = (regex.StringExpression("{")) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 _choice6 = self._pos @@ -629,7 +629,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice6 raise BacktrackException(_error) @@ -654,7 +654,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -702,7 +702,7 @@ class Parser(object): _result = self.__chars__(')') _result = _before_discard2 break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice3 = self._pos @@ -711,7 +711,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 _choice4 = self._pos @@ -722,7 +722,7 @@ class Parser(object): cc = _result _result = (reduce(operator.or_, [regex.RangeExpression(a, chr(ord(a) + b - 1)) for a, b in compress_char_set(cc)])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 _choice5 = self._pos @@ -733,7 +733,7 @@ class Parser(object): c = _result _result = (regex.StringExpression(c)) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 _choice6 = self._pos @@ -741,7 +741,7 @@ class Parser(object): _result = self.__chars__('.') _result = (regex.RangeExpression(chr(0), chr(255))) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice6 _choice7 = self._pos @@ -749,7 +749,7 @@ class Parser(object): _result = self.__chars__('-') _result = (regex.StringExpression('-')) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, 
_exc.error) self._pos = _choice7 _choice8 = self._pos @@ -757,7 +757,7 @@ class Parser(object): _result = self.__chars__('\\') _result = (regex.StringExpression('\\')) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice8 _choice9 = self._pos @@ -765,7 +765,7 @@ class Parser(object): _result = self.__chars__(']') _result = (regex.StringExpression(']')) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice9 raise BacktrackException(_error) @@ -789,7 +789,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -833,7 +833,7 @@ class Parser(object): c = _result _result = (unescape(c)) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -844,7 +844,7 @@ class Parser(object): c = _result _result = (c) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 raise BacktrackException(_error) @@ -871,7 +871,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -903,7 +903,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -935,7 +935,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error @@ -994,7 +994,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1039,7 +1039,7 @@ class Parser(object): s = _result _result = (set([chr(c) for c in range(256)]) - s) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -1048,7 +1048,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 raise BacktrackException(_error) @@ -1073,7 +1073,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1120,7 +1120,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error _all1.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 break @@ -1128,7 +1128,7 @@ 
class Parser(object): l = _result _result = (reduce(operator.or_, [set(["]"])] + l)) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice3 = self._pos @@ -1145,7 +1145,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all4.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 break @@ -1153,7 +1153,7 @@ class Parser(object): l = _result _result = (reduce(operator.or_, l)) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 raise BacktrackException(_error) @@ -1169,7 +1169,7 @@ class Parser(object): _result = _call_status.result _error = self._combine_errors(_error, _call_status.error) _all6.append(_result) - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice7 break @@ -1194,7 +1194,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1236,7 +1236,7 @@ class Parser(object): _result = _call_status.result _error = _call_status.error break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -1252,7 +1252,7 @@ class Parser(object): c2 = _result _result = (set([chr(i) for i in range(ord(c1), ord(c2) + 1)])) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 _choice2 = self._pos @@ -1260,7 +1260,7 @@ class Parser(object): _result = self.__chars__('.') _result = ( set(['.']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 _choice3 = self._pos @@ -1268,7 +1268,7 @@ class Parser(object): _result = self.__chars__('*') _result = ( set(['*']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 _choice4 = self._pos @@ -1276,7 +1276,7 @@ class Parser(object): _result = self.__chars__('+') _result = ( set(['+']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 _choice5 = self._pos @@ -1284,7 +1284,7 @@ class Parser(object): _result = self.__chars__('?') _result = ( set(['?']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 _choice6 = self._pos @@ -1292,7 +1292,7 @@ class Parser(object): _result = self.__chars__('-') _result = ( set(['-']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice6 _choice7 = self._pos @@ -1300,7 +1300,7 @@ class Parser(object): _result = self.__chars__('[') _result = ( set(['[']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = 
_choice7 _choice8 = self._pos @@ -1311,7 +1311,7 @@ class Parser(object): c = _result _result = ( set([c]) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice8 raise BacktrackException(_error) @@ -1338,7 +1338,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1387,7 +1387,7 @@ class Parser(object): n2 = _result _result = (n1, n2) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice0 _choice1 = self._pos @@ -1398,7 +1398,7 @@ class Parser(object): n1 = _result _result = (n1, n1) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 raise BacktrackException(_error) @@ -1425,7 +1425,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1483,7 +1483,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1515,7 +1515,7 @@ class Parser(object): _result = self.__chars__('d') _result = ( set([chr(c) for c in range(ord('0'), ord('9')+1)]) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = _exc.error self._pos = _choice0 _choice1 = self._pos @@ -1524,7 +1524,7 @@ class Parser(object): _result = self.__chars__('s') _result = ( set(['\t', '\n', '\f', '\r', ' ']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice1 _choice2 = self._pos @@ -1533,7 +1533,7 @@ class Parser(object): _result = self.__chars__('w') _result = ( set([chr(c) for c in range(ord('a'), ord('z')+1)] + [chr(c) for c in range(ord('A'), ord('Z')+1)] + [chr(c) for c in range(ord('0'), ord('9')+1)] + ['_']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice2 _choice3 = self._pos @@ -1542,7 +1542,7 @@ class Parser(object): _result = self.__chars__('D') _result = ( set([chr(c) for c in range(256)]) - set([chr(c) for c in range(ord('0'), ord('9')+1)]) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice3 _choice4 = self._pos @@ -1551,7 +1551,7 @@ class Parser(object): _result = self.__chars__('S') _result = ( set([chr(c) for c in range(256)]) - set(['\t', '\n', '\f', '\r', ' ']) ) break - except BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice4 _choice5 = self._pos @@ -1560,7 +1560,7 @@ class Parser(object): _result = self.__chars__('W') _result = ( set([chr(c) for c in range(256)]) - set([chr(c) for c in range(ord('a'), ord('z')+1)] + [chr(c) for c in range(ord('A'), ord('Z')+1)] + [chr(c) for c in range(ord('0'), ord('9')+1)] + ['_'])) break - except 
BacktrackException, _exc: + except BacktrackException as _exc: _error = self._combine_errors(_error, _exc.error) self._pos = _choice5 raise BacktrackException(_error) @@ -1574,7 +1574,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = self._combine_errors(_error, _exc.error) @@ -1608,7 +1608,7 @@ class Parser(object): _status.result = _result _status.error = _error return _status - except BacktrackException, _exc: + except BacktrackException as _exc: _status.pos = -1 _status.result = None _error = _exc.error diff --git a/rpython/rlib/rawrefcount.py b/rpython/rlib/rawrefcount.py index 035f8c7b7f..c9e7239b40 100644 --- a/rpython/rlib/rawrefcount.py +++ b/rpython/rlib/rawrefcount.py @@ -27,12 +27,13 @@ def init(dealloc_trigger_callback=None): """NOT_RPYTHON: set up rawrefcount with the GC. This is only used for tests; it should not be called at all during translation. """ - global _p_list, _o_list, _adr2pypy, _pypy2ob + global _p_list, _o_list, _adr2pypy, _pypy2ob, _ob_set global _d_list, _dealloc_trigger_callback _p_list = [] _o_list = [] _adr2pypy = [None] _pypy2ob = {} + _ob_set = set() _d_list = [] _dealloc_trigger_callback = dealloc_trigger_callback @@ -40,19 +41,23 @@ def create_link_pypy(p, ob): "NOT_RPYTHON: a link where the PyPy object contains some or all the data" #print 'create_link_pypy\n\t%s\n\t%s' % (p, ob) assert p not in _pypy2ob - #assert not ob.c_ob_pypy_link + assert ob._obj not in _ob_set + assert not ob.c_ob_pypy_link ob.c_ob_pypy_link = _build_pypy_link(p) _pypy2ob[p] = ob _p_list.append(ob) + _ob_set.add(ob._obj) def create_link_pyobj(p, ob): """NOT_RPYTHON: a link where the PyObject contains all the data. from_obj() will not work on this 'p'.""" #print 'create_link_pyobj\n\t%s\n\t%s' % (p, ob) assert p not in _pypy2ob - #assert not ob.c_ob_pypy_link + assert ob._obj not in _ob_set + assert not ob.c_ob_pypy_link ob.c_ob_pypy_link = _build_pypy_link(p) _o_list.append(ob) + _ob_set.add(ob._obj) def from_obj(OB_PTR_TYPE, p): "NOT_RPYTHON" diff --git a/rpython/rlib/rdynload.py b/rpython/rlib/rdynload.py index 8bf40a9bb3..1f6844a9df 100644 --- a/rpython/rlib/rdynload.py +++ b/rpython/rlib/rdynload.py @@ -97,7 +97,7 @@ if not _WIN32: name = rffi.charp2str(name) try: ctypes.CDLL(name) - except OSError, e: + except OSError as e: return str(e) else: return ("opening %r with ctypes.CDLL() works, " diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py index 99be096eef..b79fe5dadd 100644 --- a/rpython/rlib/rgc.py +++ b/rpython/rlib/rgc.py @@ -5,6 +5,7 @@ import types from rpython.rlib import jit from rpython.rlib.objectmodel import we_are_translated, enforceargs, specialize +from rpython.rlib.objectmodel import CDefinedIntSymbolic from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rtyper.lltypesystem import lltype, llmemory @@ -361,11 +362,174 @@ def no_collect(func): return func def must_be_light_finalizer(func): + """Mark a __del__ method as being a destructor, calling only a limited + set of operations. See pypy/doc/discussion/finalizer-order.rst. + + If you use the same decorator on a class, this class and all its + subclasses are only allowed to have __del__ methods which are + similarly decorated (or no __del__ at all). It prevents a class + hierarchy from having destructors in some parent classes, which are + overridden in subclasses with (non-light, old-style) finalizers. 
+ (This case is the original motivation for FinalizerQueue.) + """ func._must_be_light_finalizer_ = True return func + +class FinalizerQueue(object): + """A finalizer queue. See pypy/doc/discussion/finalizer-order.rst. + Note: only works with the framework GCs (like minimark). It is + ignored with Boehm or with refcounting (used by tests). + """ + # Must be subclassed, and the subclass needs these attributes: + # + # Class: + # the class (or base class) of finalized objects + # + # def finalizer_trigger(self): + # called to notify that new items have been put in the queue + + def _freeze_(self): + return True + + @specialize.arg(0) + @jit.dont_look_inside + def next_dead(self): + if we_are_translated(): + from rpython.rtyper.lltypesystem.lloperation import llop + from rpython.rtyper.rclass import OBJECTPTR + from rpython.rtyper.annlowlevel import cast_base_ptr_to_instance + tag = FinalizerQueue._get_tag(self) + ptr = llop.gc_fq_next_dead(OBJECTPTR, tag) + return cast_base_ptr_to_instance(self.Class, ptr) + try: + return self._queue.popleft() + except (AttributeError, IndexError): + return None + + @specialize.arg(0) + @jit.dont_look_inside + def register_finalizer(self, obj): + assert isinstance(obj, self.Class) + if we_are_translated(): + from rpython.rtyper.lltypesystem.lloperation import llop + from rpython.rtyper.rclass import OBJECTPTR + from rpython.rtyper.annlowlevel import cast_instance_to_base_ptr + tag = FinalizerQueue._get_tag(self) + ptr = cast_instance_to_base_ptr(obj) + llop.gc_fq_register(lltype.Void, tag, ptr) + return + else: + self._untranslated_register_finalizer(obj) + + def _get_tag(self): + "NOT_RPYTHON: special-cased below" + + def _reset(self): + import collections + self._weakrefs = set() + self._queue = collections.deque() + + def _already_registered(self, obj): + return hasattr(obj, '__enable_del_for_id') + + def _untranslated_register_finalizer(self, obj): + assert not self._already_registered(obj) + + if not hasattr(self, '_queue'): + self._reset() + + # Fetch and check the type of 'obj' + objtyp = obj.__class__ + assert isinstance(objtyp, type), ( + "%r: to run register_finalizer() untranslated, " + "the object's class must be new-style" % (obj,)) + assert hasattr(obj, '__dict__'), ( + "%r: to run register_finalizer() untranslated, " + "the object must have a __dict__" % (obj,)) + assert (not hasattr(obj, '__slots__') or + type(obj).__slots__ == () or + type(obj).__slots__ == ('__weakref__',)), ( + "%r: to run register_finalizer() untranslated, " + "the object must not have __slots__" % (obj,)) + + # The first time, patch the method __del__ of the class, if + # any, so that we can disable it on the original 'obj' and + # enable it only on the 'newobj' + _fq_patch_class(objtyp) + + # Build a new shadow object with the same class and dict + newobj = object.__new__(objtyp) + obj.__dict__ = obj.__dict__.copy() #PyPy: break the dict->obj dependency + newobj.__dict__ = obj.__dict__ + + # A callback that is invoked when (or after) 'obj' is deleted; + # 'newobj' is still kept alive here + def callback(wr): + self._weakrefs.discard(wr) + self._queue.append(newobj) + self.finalizer_trigger() + + import weakref + wr = weakref.ref(obj, callback) + self._weakrefs.add(wr) + + # Disable __del__ on the original 'obj' and enable it only on + # the 'newobj'. Use id() and not a regular reference, because + # that would make a cycle between 'newobj' and 'obj.__dict__' + # (which is 'newobj.__dict__' too). 
+ setattr(obj, '__enable_del_for_id', id(newobj)) + + +def _fq_patch_class(Cls): + if Cls in _fq_patched_classes: + return + if '__del__' in Cls.__dict__: + def __del__(self): + if not we_are_translated(): + try: + if getattr(self, '__enable_del_for_id') != id(self): + return + except AttributeError: + pass + original_del(self) + original_del = Cls.__del__ + Cls.__del__ = __del__ + _fq_patched_classes.add(Cls) + for BaseCls in Cls.__bases__: + _fq_patch_class(BaseCls) + +_fq_patched_classes = set() + +class FqTagEntry(ExtRegistryEntry): + _about_ = FinalizerQueue._get_tag.im_func + + def compute_result_annotation(self, s_fq): + assert s_fq.is_constant() + fq = s_fq.const + s_func = self.bookkeeper.immutablevalue(fq.finalizer_trigger) + self.bookkeeper.emulate_pbc_call(self.bookkeeper.position_key, + s_func, []) + if not hasattr(fq, '_fq_tag'): + fq._fq_tag = CDefinedIntSymbolic( + '0 /*FinalizerQueue TAG for %s*/' % fq.__class__.__name__, + default=fq) + return self.bookkeeper.immutablevalue(fq._fq_tag) + + def specialize_call(self, hop): + from rpython.rtyper.rclass import InstanceRepr + translator = hop.rtyper.annotator.translator + fq = hop.args_s[0].const + graph = translator._graphof(fq.finalizer_trigger.im_func) + InstanceRepr.check_graph_of_del_does_not_call_too_much(hop.rtyper, + graph) + hop.exception_cannot_occur() + return hop.inputconst(lltype.Signed, hop.s_result.const) + + # ____________________________________________________________ + def get_rpy_roots(): "NOT_RPYTHON" # Return the 'roots' from the GC. diff --git a/rpython/rlib/rsocket.py b/rpython/rlib/rsocket.py index ff6b9b4257..39af9f4ab1 100644 --- a/rpython/rlib/rsocket.py +++ b/rpython/rlib/rsocket.py @@ -933,7 +933,7 @@ class RSocket(object): res = self.send_raw(p, remaining, flags) p = rffi.ptradd(p, res) remaining -= res - except CSocketError, e: + except CSocketError as e: if e.errno != _c.EINTR: raise if signal_checker is not None: diff --git a/rpython/rlib/rsre/rpy/_sre.py b/rpython/rlib/rsre/rpy/_sre.py index c246ec13d3..9fa12d50f6 100644 --- a/rpython/rlib/rsre/rpy/_sre.py +++ b/rpython/rlib/rsre/rpy/_sre.py @@ -19,7 +19,7 @@ def get_code(regexp, flags=0, allargs=False): from . 
import sre_compile try: sre_compile.compile(regexp, flags) - except GotIt, e: + except GotIt as e: pass else: raise ValueError("did not reach _sre.compile()!") diff --git a/rpython/rlib/rsre/test/test_search.py b/rpython/rlib/rsre/test/test_search.py index e5f7ac2158..071cbeeb56 100644 --- a/rpython/rlib/rsre/test/test_search.py +++ b/rpython/rlib/rsre/test/test_search.py @@ -169,7 +169,7 @@ class TestSearch: def test_empty_maxuntil_2(self): try: r_code, r = get_code_and_re(r'X(.*?)+X') - except re.error, e: + except re.error as e: py.test.skip("older version of the stdlib: %s" % (e,)) assert r.match('XfooXbarX').span() == (0, 5) assert r.match('XfooXbarX').span(1) == (4, 4) diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py index a5eaf5311f..74fb5fdbb5 100644 --- a/rpython/rlib/rthread.py +++ b/rpython/rlib/rthread.py @@ -100,8 +100,11 @@ def get_ident(): return thread.get_ident() def get_or_make_ident(): - assert we_are_translated() - return tlfield_thread_ident.get_or_make_raw() + if we_are_translated(): + return tlfield_thread_ident.get_or_make_raw() + else: + import thread + return thread.get_ident() @specialize.arg(0) def start_new_thread(x, y): diff --git a/rpython/rlib/runicode.py b/rpython/rlib/runicode.py index 30ef4b7f96..46a3312c4b 100644 --- a/rpython/rlib/runicode.py +++ b/rpython/rlib/runicode.py @@ -989,8 +989,6 @@ def str_decode_latin_1(s, size, errors, final=False, return result.build(), pos -# Specialize on the errorhandler when it's a constant -@specialize.arg_or_var(4) def str_decode_ascii(s, size, errors, final=False, errorhandler=None): if errorhandler is None: @@ -1020,8 +1018,6 @@ def fast_str_decode_ascii(s): return result.build() -# Specialize on the errorhandler when it's a constant -@specialize.arg_or_var(3) def unicode_encode_ucs1_helper(p, size, errors, errorhandler=None, limit=256): if errorhandler is None: @@ -1064,12 +1060,10 @@ def unicode_encode_ucs1_helper(p, size, errors, return result.build() -@specialize.arg_or_var(3) def unicode_encode_latin_1(p, size, errors, errorhandler=None): res = unicode_encode_ucs1_helper(p, size, errors, errorhandler, 256) return res -@specialize.arg_or_var(3) def unicode_encode_ascii(p, size, errors, errorhandler=None): res = unicode_encode_ucs1_helper(p, size, errors, errorhandler, 128) return res @@ -1194,8 +1188,6 @@ def hexescape(builder, s, pos, digits, builder.append(res) return pos -# Specialize on the errorhandler when it's a constant -@specialize.arg_or_var(4) def str_decode_unicode_escape(s, size, errors, final=False, errorhandler=None, unicodedata_handler=None): diff --git a/rpython/rlib/rurandom.py b/rpython/rlib/rurandom.py index d5d459a74c..2a61830c61 100644 --- a/rpython/rlib/rurandom.py +++ b/rpython/rlib/rurandom.py @@ -103,7 +103,7 @@ else: # Posix implementation while n > 0: try: data = os.read(fd, n) - except OSError, e: + except OSError as e: if e.errno != errno.EINTR: raise data = '' diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h index 6e9c6dda77..bdba4546e4 100644 --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -82,6 +82,10 @@ static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_dept int n = 0; intptr_t addr = 0; int bottom_jitted = 0; + + if (stack == NULL) + return 0; + // check if the pc is in JIT #ifdef PYPY_JIT_CODEMAP if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { @@ -111,7 +115,12 @@ static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int 
max_dept #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { - return RPY_THREADLOCALREF_GET(vmprof_tl_stack); + struct pypy_threadlocal_s *tl; + _OP_THREADLOCALREF_ADDR_SIGHANDLER(tl); + if (tl == NULL) + return NULL; + else + return tl->vmprof_tl_stack; } #else static vmprof_stack_t *get_vmprof_stack(void) diff --git a/rpython/rlib/rvmprof/test/test_rvmprof.py b/rpython/rlib/rvmprof/test/test_rvmprof.py index 9f0f4db5a9..663dfeef3b 100644 --- a/rpython/rlib/rvmprof/test/test_rvmprof.py +++ b/rpython/rlib/rvmprof/test/test_rvmprof.py @@ -64,7 +64,7 @@ def test_register_code(): pass try: rvmprof.register_code_object_class(MyCode, lambda code: 'some code') - except rvmprof.VMProfPlatformUnsupported, e: + except rvmprof.VMProfPlatformUnsupported as e: py.test.skip(str(e)) @rvmprof.vmprof_execute_code("xcode1", lambda code, num: code) @@ -92,7 +92,7 @@ def test_enable(): return 'py:code:52:x' try: rvmprof.register_code_object_class(MyCode, get_name) - except rvmprof.VMProfPlatformUnsupported, e: + except rvmprof.VMProfPlatformUnsupported as e: py.test.skip(str(e)) @rvmprof.vmprof_execute_code("xcode1", lambda code, num: code) diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py index 476b244c5a..7a8747bd85 100644 --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -19,7 +19,7 @@ class MyCode: def setup_module(mod): try: rvmprof.register_code_object_class(MyCode, MyCode.get_name) - except rvmprof.VMProfPlatformUnsupported, e: + except rvmprof.VMProfPlatformUnsupported as e: py.test.skip(str(e)) diff --git a/rpython/rlib/rweakref.py b/rpython/rlib/rweakref.py index 60b1f0140b..b83629b6df 100644 --- a/rpython/rlib/rweakref.py +++ b/rpython/rlib/rweakref.py @@ -7,7 +7,14 @@ a form of WeakKeyDictionary, and a limited version of WeakValueDictionary. import weakref from rpython.annotator.model import UnionError -ref = weakref.ref # basic regular weakrefs are supported in RPython + +# Basic regular weakrefs are supported in RPython. +# Note that if 'translation.rweakref' is False, they will +# still work, but be implemented as a strong reference. +# This case is useful for developing new GCs, for example. 
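Aside: the rweakref.py comment above states that with translation.rweakref set to False, RPython-level weakrefs keep working but are backed by a strong reference, so the referent is never collected. A minimal sketch of that behaviour, with hypothetical names and independent of this patch:

    class _StrongRefSketch(object):
        # Illustration only: same call pattern as weakref.ref, but the
        # referent is held strongly, so dereferencing never returns None.
        def __init__(self, obj, callback=None):
            self._obj = obj              # strong reference keeps 'obj' alive

        def __call__(self):
            return self._obj

    class _Dummy(object):
        pass

    d = _Dummy()
    r = _StrongRefSketch(d)
    assert r() is d                      # always succeeds; the "weakref" never dies
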
+ +ref = weakref.ref + def has_weakref_support(): return True # returns False if --no-translation-rweakref diff --git a/rpython/rlib/streamio.py b/rpython/rlib/streamio.py index 7456fbd48d..ab3ca6bfd4 100644 --- a/rpython/rlib/streamio.py +++ b/rpython/rlib/streamio.py @@ -324,7 +324,7 @@ class DiskFile(Stream): while True: try: return os.read(self.fd, n) - except OSError, e: + except OSError as e: if e.errno != errno.EINTR: raise if self.signal_checker is not None: @@ -338,7 +338,7 @@ class DiskFile(Stream): while True: try: c = os.read(self.fd, 1) - except OSError, e: + except OSError as e: if e.errno != errno.EINTR: raise if self.signal_checker is not None: @@ -356,7 +356,7 @@ class DiskFile(Stream): while data: try: n = os.write(self.fd, data) - except OSError, e: + except OSError as e: if e.errno != errno.EINTR: raise if self.signal_checker is not None: @@ -383,7 +383,7 @@ class DiskFile(Stream): else: try: os.ftruncate(self.fd, size) - except IOError, e: + except IOError as e: raise OSError(*e.args) def try_to_find_file_descriptor(self): @@ -669,7 +669,7 @@ class BufferingInputStream(Stream): while 1: try: data = self.do_read(bufsize) - except OSError, o: + except OSError as o: # like CPython < 3.4, partial results followed by an error # are returned as data if not chunks: diff --git a/rpython/rlib/test/test_libffi.py b/rpython/rlib/test/test_libffi.py index 8caebdf2f4..0b206ac3f9 100644 --- a/rpython/rlib/test/test_libffi.py +++ b/rpython/rlib/test/test_libffi.py @@ -500,7 +500,7 @@ class TestLibffiCall(BaseFfiTest): exec s in glob, loc except TypeError: pass - except LLException, e: + except LLException as e: if str(e) != "<LLException 'TypeError'>": raise else: @@ -581,10 +581,10 @@ class TestLibffiCall(BaseFfiTest): func = (libfoo, '_std_diff_xy@8', [types.sint, types.signed], types.sint) try: self.call(func, [50, 8], lltype.Signed) - except ValueError, e: + except ValueError as e: assert e.message == 'Procedure called with not enough ' + \ 'arguments (8 bytes missing) or wrong calling convention' - except LLException, e: + except LLException as e: #jitted code raises this assert str(e) == "<LLException 'StackCheckError'>" else: diff --git a/rpython/rlib/test/test_objectmodel.py b/rpython/rlib/test/test_objectmodel.py index 18e05f29db..56ae813f05 100644 --- a/rpython/rlib/test/test_objectmodel.py +++ b/rpython/rlib/test/test_objectmodel.py @@ -764,7 +764,7 @@ def test_import_from_mixin(): class B(object): a = 63 import_from_mixin(M) - except Exception, e: + except Exception as e: assert ("would overwrite the value already defined locally for 'a'" in str(e)) else: diff --git a/rpython/rlib/test/test_rgc.py b/rpython/rlib/test/test_rgc.py index 0502eb9f16..e68a0bc1cb 100644 --- a/rpython/rlib/test/test_rgc.py +++ b/rpython/rlib/test/test_rgc.py @@ -1,4 +1,5 @@ from rpython.rtyper.test.test_llinterp import gengraph, interpret +from rpython.rtyper.error import TyperError from rpython.rtyper.lltypesystem import lltype, llmemory from rpython.rlib import rgc # Force registration of gc.collect import gc @@ -252,3 +253,134 @@ def test_register_custom_trace_hook(): t, typer, graph = gengraph(f, []) assert typer.custom_trace_funcs == [(TP, trace_func)] + + +# ____________________________________________________________ + + +class T_Root(object): + pass + +class T_Int(T_Root): + def __init__(self, x): + self.x = x + +class SimpleFQ(rgc.FinalizerQueue): + Class = T_Root + _triggered = 0 + def finalizer_trigger(self): + self._triggered += 1 + +class TestFinalizerQueue: + + def 
test_simple(self): + fq = SimpleFQ() + assert fq.next_dead() is None + assert fq._triggered == 0 + w = T_Int(67) + fq.register_finalizer(w) + # + gc.collect() + assert fq._triggered == 0 + assert fq.next_dead() is None + # + del w + gc.collect() + assert fq._triggered == 1 + n = fq.next_dead() + assert type(n) is T_Int and n.x == 67 + # + gc.collect() + assert fq._triggered == 1 + assert fq.next_dead() is None + + def test_del_1(self): + deleted = {} + class T_Del(T_Int): + def __del__(self): + deleted[self.x] = deleted.get(self.x, 0) + 1 + + fq = SimpleFQ() + fq.register_finalizer(T_Del(42)) + gc.collect(); gc.collect() + assert deleted == {} + assert fq._triggered == 1 + n = fq.next_dead() + assert type(n) is T_Del and n.x == 42 + assert deleted == {} + del n + gc.collect() + assert fq.next_dead() is None + assert deleted == {42: 1} + assert fq._triggered == 1 + + def test_del_2(self): + deleted = {} + class T_Del1(T_Int): + def __del__(self): + deleted[1, self.x] = deleted.get((1, self.x), 0) + 1 + class T_Del2(T_Del1): + def __del__(self): + deleted[2, self.x] = deleted.get((2, self.x), 0) + 1 + T_Del1.__del__(self) + + fq = SimpleFQ() + w = T_Del2(42) + fq.register_finalizer(w) + del w + fq.register_finalizer(T_Del1(21)) + gc.collect(); gc.collect() + assert deleted == {} + assert fq._triggered == 2 + a = fq.next_dead() + b = fq.next_dead() + if a.x == 21: + a, b = b, a + assert type(a) is T_Del2 and a.x == 42 + assert type(b) is T_Del1 and b.x == 21 + assert deleted == {} + del a, b + gc.collect() + assert fq.next_dead() is None + assert deleted == {(1, 42): 1, (2, 42): 1, (1, 21): 1} + assert fq._triggered == 2 + + def test_del_3(self): + deleted = {} + class T_Del1(T_Int): + def __del__(self): + deleted[1, self.x] = deleted.get((1, self.x), 0) + 1 + class T_Del2(T_Del1): + pass + + fq = SimpleFQ() + fq.register_finalizer(T_Del2(42)) + gc.collect(); gc.collect() + assert deleted == {} + assert fq._triggered == 1 + a = fq.next_dead() + assert type(a) is T_Del2 and a.x == 42 + assert deleted == {} + del a + gc.collect() + assert fq.next_dead() is None + assert deleted == {(1, 42): 1} + assert fq._triggered == 1 + + def test_finalizer_trigger_calls_too_much(self): + from rpython.rtyper.lltypesystem import lltype, rffi + external_func = rffi.llexternal("foo", [], lltype.Void) + # ^^^ with release_gil=True + class X(object): + pass + class FQ(rgc.FinalizerQueue): + Class = X + def finalizer_trigger(self): + external_func() + fq = FQ() + def f(): + x = X() + fq.register_finalizer(x) + + e = py.test.raises(TyperError, gengraph, f, []) + assert str(e.value).startswith('the RPython-level __del__() method in') diff --git a/rpython/rlib/test/test_rmmap.py b/rpython/rlib/test/test_rmmap.py index 6d425b7216..2a98305b7f 100644 --- a/rpython/rlib/test/test_rmmap.py +++ b/rpython/rlib/test/test_rmmap.py @@ -296,7 +296,7 @@ class TestMMap: f = open(self.tmpname + "l2", "w+") f.write("foobar") f.flush() - m = mmap.mmap(f.fileno(), 6, prot=~mmap.PROT_WRITE) + m = mmap.mmap(f.fileno(), 6, prot=mmap.PROT_READ|mmap.PROT_EXEC) py.test.raises(RTypeError, m.check_writeable) py.test.raises(RTypeError, m.check_writeable) m.close() diff --git a/rpython/rlib/test/test_rposix.py b/rpython/rlib/test/test_rposix.py index ddbc1b87c5..8b7778cb15 100644 --- a/rpython/rlib/test/test_rposix.py +++ b/rpython/rlib/test/test_rposix.py @@ -39,7 +39,7 @@ class TestPosixFunction: def test_getlogin(self): try: expected = os.getlogin() - except OSError, e: + except OSError as e: py.test.skip("the underlying os.getlogin() 
failed: %s" % e) data = rposix.getlogin() assert data == expected diff --git a/rpython/rlib/test/test_rposix_stat.py b/rpython/rlib/test/test_rposix_stat.py index c16ac56693..0c5f5aef04 100644 --- a/rpython/rlib/test/test_rposix_stat.py +++ b/rpython/rlib/test/test_rposix_stat.py @@ -44,7 +44,7 @@ class TestPosixStatFunctions: def test_statvfs(self): try: os.statvfs('.') - except OSError, e: + except OSError as e: py.test.skip("the underlying os.statvfs() failed: %s" % e) rposix_stat.statvfs('.') @@ -53,7 +53,7 @@ class TestPosixStatFunctions: def test_fstatvfs(self): try: os.fstatvfs(0) - except OSError, e: + except OSError as e: py.test.skip("the underlying os.fstatvfs() failed: %s" % e) rposix_stat.fstatvfs(0) diff --git a/rpython/rlib/test/test_rsocket.py b/rpython/rlib/test/test_rsocket.py index 1e25fed3fe..ea01b766c0 100644 --- a/rpython/rlib/test/test_rsocket.py +++ b/rpython/rlib/test/test_rsocket.py @@ -152,7 +152,7 @@ def test_simple_tcp(): sock.bind(INETAddress('127.0.0.1', port)) print 'works' break - except SocketError, e: # should get a "Permission denied" + except SocketError as e: # should get a "Permission denied" print e else: raise e @@ -212,7 +212,7 @@ def test_simple_udp(): s1.bind(INETAddress('127.0.0.1', port)) print 'works' break - except SocketError, e: # should get a "Permission denied" + except SocketError as e: # should get a "Permission denied" print e else: raise e @@ -247,7 +247,7 @@ def test_nonblocking(): sock.bind(INETAddress('127.0.0.1', port)) print 'works' break - except SocketError, e: # should get a "Permission denied" + except SocketError as e: # should get a "Permission denied" print e else: raise e diff --git a/rpython/rlib/test/test_rstacklet.py b/rpython/rlib/test/test_rstacklet.py index 2054049f4c..53cb640fa1 100644 --- a/rpython/rlib/test/test_rstacklet.py +++ b/rpython/rlib/test/test_rstacklet.py @@ -4,7 +4,7 @@ import platform from rpython.rtyper.tool.rffi_platform import CompilationError try: from rpython.rlib import rstacklet -except CompilationError, e: +except CompilationError as e: py.test.skip("cannot import rstacklet: %s" % e) from rpython.config.translationoption import DEFL_ROOTFINDER_WITHJIT diff --git a/rpython/rlib/test/test_rtermios.py b/rpython/rlib/test/test_rtermios.py index 5b63459afd..d7834320e2 100644 --- a/rpython/rlib/test/test_rtermios.py +++ b/rpython/rlib/test/test_rtermios.py @@ -50,7 +50,7 @@ class TestLLTermios(object): fd = os.open('.', 0, 0777) try: rtermios.tcgetattr(fd) - except OSError, e: + except OSError as e: assert e.errno == errno.ENOTTY print "ok" diff --git a/rpython/rlib/test/test_runicode.py b/rpython/rlib/test/test_runicode.py index 110f1c64ad..5689e0014e 100644 --- a/rpython/rlib/test/test_runicode.py +++ b/rpython/rlib/test/test_runicode.py @@ -53,7 +53,7 @@ class UnicodeTests(object): else: trueresult = s s = s.encode(encoding) - except LookupError, e: + except LookupError as e: py.test.skip(e) result, consumed = decoder(s, len(s), True) assert consumed == len(s) @@ -67,7 +67,7 @@ class UnicodeTests(object): else: trueresult = s s = s.decode(encoding) - except LookupError, e: + except LookupError as e: py.test.skip(e) result = encoder(s, len(s), True) self.typeequals(trueresult, result) diff --git a/rpython/rlib/test/test_streamio.py b/rpython/rlib/test/test_streamio.py index 67342083e6..1677c6075e 100644 --- a/rpython/rlib/test/test_streamio.py +++ b/rpython/rlib/test/test_streamio.py @@ -675,7 +675,7 @@ class TestMMapFile(BaseTestBufferingInputStreamTests): self.tfn = None try: os.remove(tfn) - 
except os.error, msg: + except os.error as msg: print "can't remove %s: %s" % (tfn, msg) def makeStream(self, tell=None, seek=None, bufsize=-1, mode="r"): diff --git a/rpython/rtyper/annlowlevel.py b/rpython/rtyper/annlowlevel.py index ba160b062b..433d67054d 100644 --- a/rpython/rtyper/annlowlevel.py +++ b/rpython/rtyper/annlowlevel.py @@ -471,6 +471,15 @@ def cast_instance_to_gcref(instance): return lltype.cast_opaque_ptr(llmemory.GCREF, cast_instance_to_base_ptr(instance)) +@specialize.argtype(0) +def cast_nongc_instance_to_base_ptr(instance): + from rpython.rtyper.rclass import NONGCOBJECTPTR + return cast_object_to_ptr(NONGCOBJECTPTR, instance) + +@specialize.argtype(0) +def cast_nongc_instance_to_adr(instance): + return llmemory.cast_ptr_to_adr(cast_nongc_instance_to_base_ptr(instance)) + class CastObjectToPtrEntry(extregistry.ExtRegistryEntry): _about_ = cast_object_to_ptr @@ -512,6 +521,8 @@ def cast_base_ptr_to_instance(Class, ptr): % (ptr, Class)) return ptr +cast_base_ptr_to_nongc_instance = cast_base_ptr_to_instance + @specialize.arg(0) def cast_gcref_to_instance(Class, ptr): """Reverse the hacking done in cast_instance_to_gcref().""" @@ -519,6 +530,12 @@ def cast_gcref_to_instance(Class, ptr): ptr = lltype.cast_opaque_ptr(OBJECTPTR, ptr) return cast_base_ptr_to_instance(Class, ptr) +@specialize.arg(0) +def cast_adr_to_nongc_instance(Class, ptr): + from rpython.rtyper.rclass import NONGCOBJECTPTR + ptr = llmemory.cast_adr_to_ptr(ptr, NONGCOBJECTPTR) + return cast_base_ptr_to_nongc_instance(Class, ptr) + class CastBasePtrToInstanceEntry(extregistry.ExtRegistryEntry): _about_ = cast_base_ptr_to_instance diff --git a/rpython/rtyper/callparse.py b/rpython/rtyper/callparse.py index 2016e86d8e..c44f6d9503 100644 --- a/rpython/rtyper/callparse.py +++ b/rpython/rtyper/callparse.py @@ -58,7 +58,7 @@ def callparse(rtyper, graph, hop, r_self=None): defs_h.append(ConstHolder(x)) try: holders = arguments.match_signature(signature, defs_h) - except ArgErr, e: + except ArgErr as e: raise TyperError("signature mismatch: %s: %s" % ( graph.name, e.getmsg())) diff --git a/rpython/rtyper/llinterp.py b/rpython/rtyper/llinterp.py index 0e8f6e96be..098164e2d4 100644 --- a/rpython/rtyper/llinterp.py +++ b/rpython/rtyper/llinterp.py @@ -85,13 +85,13 @@ class LLInterpreter(object): try: try: retval = llframe.eval() - except LLException, e: + except LLException as e: log.error("LLEXCEPTION: %s" % (e, )) self.print_traceback() if self.tracer: self.tracer.dump('LLException: %s\n' % (e,)) raise - except Exception, e: + except Exception as e: if getattr(e, '_go_through_llinterp_uncaught_', False): raise log.error("AN ERROR OCCURED: %s" % (e, )) @@ -307,10 +307,10 @@ class LLFrame(object): for i, op in enumerate(block.operations): self.curr_operation_index = i self.eval_operation(op) - except LLException, e: + except LLException as e: if op is not block.raising_op: raise - except RuntimeError, e: + except RuntimeError as e: rstackovf.check_stack_overflow() # xxx fish fish fish for proper etype and evalue to use rtyper = self.llinterpreter.typer @@ -416,7 +416,7 @@ class LLFrame(object): vals.insert(0, operation.result.concretetype) try: retval = ophandler(*vals) - except LLException, e: + except LLException as e: # safety check check that the operation is allowed to raise that # exception if operation.opname in lloperation.LL_OPERATIONS: @@ -479,9 +479,9 @@ class LLFrame(object): obj = fptr._obj try: return obj._callable(*args) - except LLException, e: + except LLException as e: raise - except Exception, e: + 
except Exception as e: if getattr(e, '_go_through_llinterp_uncaught_', False): raise if getattr(obj, '_debugexc', False): @@ -720,6 +720,12 @@ class LLFrame(object): def op_gc_add_memory_pressure(self, size): self.heap.add_memory_pressure(size) + def op_gc_fq_next_dead(self, fq_tag): + return self.heap.gc_fq_next_dead(fq_tag) + + def op_gc_fq_register(self, fq_tag, obj): + self.heap.gc_fq_register(fq_tag, obj) + def op_gc_gettypeid(self, obj): return lloperation.llop.combine_ushort(lltype.Signed, self.heap.gettypeid(obj), 0) @@ -782,17 +788,21 @@ class LLFrame(object): def op_weakref_create(self, v_obj): def objgetter(): # special support for gcwrapper.py return self.getval(v_obj) + assert self.llinterpreter.typer.getconfig().translation.rweakref return self.heap.weakref_create_getlazy(objgetter) op_weakref_create.specialform = True def op_weakref_deref(self, PTRTYPE, obj): + assert self.llinterpreter.typer.getconfig().translation.rweakref return self.heap.weakref_deref(PTRTYPE, obj) op_weakref_deref.need_result_type = True def op_cast_ptr_to_weakrefptr(self, obj): + assert self.llinterpreter.typer.getconfig().translation.rweakref return llmemory.cast_ptr_to_weakrefptr(obj) def op_cast_weakrefptr_to_ptr(self, PTRTYPE, obj): + assert self.llinterpreter.typer.getconfig().translation.rweakref return llmemory.cast_weakrefptr_to_ptr(PTRTYPE, obj) op_cast_weakrefptr_to_ptr.need_result_type = True diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py index 8ea6ae332b..fdd9203bb2 100644 --- a/rpython/rtyper/lltypesystem/ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/ll2ctypes.py @@ -231,17 +231,7 @@ def build_ctypes_array(A, delayed_builders, max_n=0): assert max_n >= 0 ITEM = A.OF ctypes_item = get_ctypes_type(ITEM, delayed_builders) - # Python 2.5 ctypes can raise OverflowError on 64-bit builds - for n in [maxint, 2**31]: - MAX_SIZE = n/64 - try: - PtrType = ctypes.POINTER(MAX_SIZE * ctypes_item) - except (OverflowError, AttributeError), e: - pass # ^^^ bah, blame ctypes - else: - break - else: - raise e + ctypes_item_ptr = ctypes.POINTER(ctypes_item) class CArray(ctypes.Structure): if is_emulated_long: @@ -265,35 +255,9 @@ def build_ctypes_array(A, delayed_builders, max_n=0): bigarray.length = n return bigarray - _ptrtype = None - - @classmethod - def _get_ptrtype(cls): - if cls._ptrtype: - return cls._ptrtype - # ctypes can raise OverflowError on 64-bit builds - # on windows it raises AttributeError even for 2**31 (_length_ missing) - if _MS_WINDOWS: - other_limit = 2**31-1 - else: - other_limit = 2**31 - for n in [maxint, other_limit]: - cls.MAX_SIZE = n / ctypes.sizeof(ctypes_item) - try: - cls._ptrtype = ctypes.POINTER(cls.MAX_SIZE * ctypes_item) - except (OverflowError, AttributeError), e: - pass - else: - break - else: - raise e - return cls._ptrtype - def _indexable(self, index): - PtrType = self._get_ptrtype() - assert index + 1 < self.MAX_SIZE - p = ctypes.cast(ctypes.pointer(self.items), PtrType) - return p.contents + p = ctypes.cast(self.items, ctypes_item_ptr) + return p def _getitem(self, index, boundscheck=True): if boundscheck: @@ -865,7 +829,7 @@ def lltype2ctypes(llobj, normalize=True): llinterp = LLInterpreter.current_interpreter try: llres = llinterp.eval_graph(container.graph, llargs) - except LLException, lle: + except LLException as lle: llinterp._store_exception(lle) return 0 #except: @@ -874,7 +838,7 @@ def lltype2ctypes(llobj, normalize=True): else: try: llres = container._callable(*llargs) - except LLException, lle: + except 
LLException as lle: llinterp = LLInterpreter.current_interpreter llinterp._store_exception(lle) return 0 @@ -1045,12 +1009,22 @@ def ctypes2lltype(T, cobj): container = _array_of_known_length(T.TO) container._storage = type(cobj)(cobj.contents) elif isinstance(T.TO, lltype.FuncType): + # cobj is a CFunctionType object. We naively think + # that it should be a function pointer. No no no. If + # it was read out of an array, say, then it is a *pointer* + # to a function pointer. In other words, the read doesn't + # read anything, it just takes the address of the function + # pointer inside the array. If later the array is modified + # or goes out of scope, then we crash. CTypes is fun. + # It works if we cast it now to an int and back. cobjkey = intmask(ctypes.cast(cobj, ctypes.c_void_p).value) if cobjkey in _int2obj: container = _int2obj[cobjkey] else: + name = getattr(cobj, '__name__', '?') + cobj = ctypes.cast(cobjkey, type(cobj)) _callable = get_ctypes_trampoline(T.TO, cobj) - return lltype.functionptr(T.TO, getattr(cobj, '__name__', '?'), + return lltype.functionptr(T.TO, name, _callable=_callable) elif isinstance(T.TO, lltype.OpaqueType): if T == llmemory.GCREF: @@ -1178,7 +1152,7 @@ if sys.platform == 'darwin': finally: try: os.unlink(ccout) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise res = re.search(expr, trace) diff --git a/rpython/rtyper/lltypesystem/lloperation.py b/rpython/rtyper/lltypesystem/lloperation.py index f761b8d4c1..40e8072020 100644 --- a/rpython/rtyper/lltypesystem/lloperation.py +++ b/rpython/rtyper/lltypesystem/lloperation.py @@ -476,6 +476,8 @@ LL_OPERATIONS = { 'gc_gettypeid' : LLOp(), 'gc_gcflag_extra' : LLOp(), 'gc_add_memory_pressure': LLOp(), + 'gc_fq_next_dead' : LLOp(), + 'gc_fq_register' : LLOp(), 'gc_rawrefcount_init': LLOp(), 'gc_rawrefcount_create_link_pypy': LLOp(), diff --git a/rpython/rtyper/lltypesystem/rbuilder.py b/rpython/rtyper/lltypesystem/rbuilder.py index 25e302c868..1dfb3c0686 100644 --- a/rpython/rtyper/lltypesystem/rbuilder.py +++ b/rpython/rtyper/lltypesystem/rbuilder.py @@ -1,5 +1,5 @@ from rpython.rlib import rgc, jit -from rpython.rlib.objectmodel import enforceargs +from rpython.rlib.objectmodel import enforceargs, dont_inline, always_inline from rpython.rlib.rarithmetic import ovfcheck, r_uint, intmask from rpython.rtyper.debug import ll_assert from rpython.rlib.unroll import unrolling_iterable @@ -37,15 +37,6 @@ from rpython.rtyper.annlowlevel import llstr, llunicode # ------------------------------------------------------------ -def dont_inline(func): - func._dont_inline_ = True - return func - -def always_inline(func): - func._always_inline_ = True - return func - - STRINGPIECE = lltype.GcStruct('stringpiece', ('buf', lltype.Ptr(STR)), ('prev_piece', lltype.Ptr(lltype.GcForwardReference()))) diff --git a/rpython/rtyper/lltypesystem/rffi.py b/rpython/rtyper/lltypesystem/rffi.py index ec0811d5a3..fb7fa2d5f9 100644 --- a/rpython/rtyper/lltypesystem/rffi.py +++ b/rpython/rtyper/lltypesystem/rffi.py @@ -475,7 +475,7 @@ for _name in 'short int long'.split(): TYPES += ['signed char', 'unsigned char', 'long long', 'unsigned long long', 'size_t', 'time_t', 'wchar_t', - 'uintptr_t', 'intptr_t', + 'uintptr_t', 'intptr_t', # C note: these two are _integer_ types 'void*'] # generic pointer type # This is a bit of a hack since we can't use rffi_platform here. 
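Aside: the ll2ctypes hunk earlier in this diff explains that a CFunctionType read out of a ctypes array is only a view of the array slot, and that taking its address immediately and casting back yields an independent function pointer. A standalone ctypes sketch of that idea, with hypothetical names and not taken from the patch:

    import ctypes

    FUNC = ctypes.CFUNCTYPE(ctypes.c_int)          # int (*)(void)

    def _forty_two():
        return 42

    keepalive = FUNC(_forty_two)                   # the callback object must stay alive
    table = (FUNC * 1)(keepalive)                  # C-level array of one function pointer

    slot = table[0]                                # refers to the array slot, not a copy
    addr = ctypes.cast(slot, ctypes.c_void_p).value          # snapshot the address now
    detached = ctypes.cast(ctypes.c_void_p(addr), FUNC)      # independent function pointer

    assert detached() == 42                        # usable even after 'table' is reused
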
diff --git a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py index 3a9535925c..b1e1f73e5e 100644 --- a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py @@ -1405,6 +1405,45 @@ class TestLL2Ctypes(object): a2 = ctypes2lltype(lltype.Ptr(A), lltype2ctypes(a)) assert a2._obj.getitem(0)._obj._parentstructure() is a2._obj + def test_array_of_function_pointers(self): + c_source = py.code.Source(r""" + #include "src/precommondefs.h" + #include <stdio.h> + + typedef int(*funcptr_t)(void); + static int forty_two(void) { return 42; } + static int forty_three(void) { return 43; } + static funcptr_t testarray[2]; + RPY_EXPORTED void runtest(void cb(funcptr_t *)) { + testarray[0] = &forty_two; + testarray[1] = &forty_three; + fprintf(stderr, "&forty_two = %p\n", testarray[0]); + fprintf(stderr, "&forty_three = %p\n", testarray[1]); + cb(testarray); + testarray[0] = 0; + testarray[1] = 0; + } + """) + eci = ExternalCompilationInfo(include_dirs=[cdir], + separate_module_sources=[c_source]) + + PtrF = lltype.Ptr(lltype.FuncType([], rffi.INT)) + ArrayPtrF = rffi.CArrayPtr(PtrF) + CALLBACK = rffi.CCallback([ArrayPtrF], lltype.Void) + + runtest = rffi.llexternal('runtest', [CALLBACK], lltype.Void, + compilation_info=eci) + seen = [] + + def callback(testarray): + seen.append(testarray[0]) # read a PtrF out of testarray + seen.append(testarray[1]) + + runtest(callback) + assert seen[0]() == 42 + assert seen[1]() == 43 + + class TestPlatform(object): def test_lib_on_libpaths(self): from rpython.translator.platform import platform diff --git a/rpython/rtyper/rbuiltin.py b/rpython/rtyper/rbuiltin.py index 0c5a5b09c7..be12d29764 100644 --- a/rpython/rtyper/rbuiltin.py +++ b/rpython/rtyper/rbuiltin.py @@ -735,12 +735,21 @@ from rpython.rtyper.lltypesystem import llmemory @typer_for(llmemory.weakref_create) @typer_for(weakref.ref) def rtype_weakref_create(hop): - vlist = hop.inputargs(hop.args_r[0]) + from rpython.rtyper.rweakref import BaseWeakRefRepr + + v_inst, = hop.inputargs(hop.args_r[0]) hop.exception_cannot_occur() - return hop.genop('weakref_create', vlist, resulttype=llmemory.WeakRefPtr) + if isinstance(hop.r_result, BaseWeakRefRepr): + return hop.r_result._weakref_create(hop, v_inst) + else: + # low-level <PtrRepr * WeakRef> + assert hop.rtyper.getconfig().translation.rweakref + return hop.genop('weakref_create', [v_inst], + resulttype=llmemory.WeakRefPtr) @typer_for(llmemory.weakref_deref) def rtype_weakref_deref(hop): + assert hop.rtyper.getconfig().translation.rweakref c_ptrtype, v_wref = hop.inputargs(lltype.Void, hop.args_r[1]) assert v_wref.concretetype == llmemory.WeakRefPtr hop.exception_cannot_occur() @@ -748,6 +757,7 @@ def rtype_weakref_deref(hop): @typer_for(llmemory.cast_ptr_to_weakrefptr) def rtype_cast_ptr_to_weakrefptr(hop): + assert hop.rtyper.getconfig().translation.rweakref vlist = hop.inputargs(hop.args_r[0]) hop.exception_cannot_occur() return hop.genop('cast_ptr_to_weakrefptr', vlist, @@ -755,6 +765,7 @@ def rtype_cast_ptr_to_weakrefptr(hop): @typer_for(llmemory.cast_weakrefptr_to_ptr) def rtype_cast_weakrefptr_to_ptr(hop): + assert hop.rtyper.getconfig().translation.rweakref c_ptrtype, v_wref = hop.inputargs(lltype.Void, hop.args_r[1]) assert v_wref.concretetype == llmemory.WeakRefPtr hop.exception_cannot_occur() diff --git a/rpython/rtyper/rclass.py b/rpython/rtyper/rclass.py index ef8e253c17..6d574cc49c 100644 --- a/rpython/rtyper/rclass.py +++ 
b/rpython/rtyper/rclass.py @@ -587,7 +587,8 @@ class InstanceRepr(Repr): assert len(s_func.descriptions) == 1 funcdesc, = s_func.descriptions graph = funcdesc.getuniquegraph() - self.check_graph_of_del_does_not_call_too_much(graph) + self.check_graph_of_del_does_not_call_too_much(self.rtyper, + graph) FUNCTYPE = FuncType([Ptr(source_repr.object_type)], Void) destrptr = functionptr(FUNCTYPE, graph.name, graph=graph, @@ -859,7 +860,8 @@ class InstanceRepr(Repr): def can_ll_be_null(self, s_value): return s_value.can_be_none() - def check_graph_of_del_does_not_call_too_much(self, graph): + @staticmethod + def check_graph_of_del_does_not_call_too_much(rtyper, graph): # RPython-level __del__() methods should not do "too much". # In the PyPy Python interpreter, they usually do simple things # like file.__del__() closing the file descriptor; or if they @@ -872,7 +874,7 @@ class InstanceRepr(Repr): # # XXX wrong complexity, but good enough because the set of # reachable graphs should be small - callgraph = self.rtyper.annotator.translator.callgraph.values() + callgraph = rtyper.annotator.translator.callgraph.values() seen = {graph: None} while True: oldlength = len(seen) diff --git a/rpython/rtyper/rpbc.py b/rpython/rtyper/rpbc.py index 63069db824..e1aae5c58f 100644 --- a/rpython/rtyper/rpbc.py +++ b/rpython/rtyper/rpbc.py @@ -544,6 +544,21 @@ class __extend__(pairtype(FunctionsPBCRepr, SmallFunctionSetPBCRepr)): ll_compress = compression_function(r_set) return llops.gendirectcall(ll_compress, v) +class __extend__(pairtype(FunctionReprBase, FunctionReprBase)): + def rtype_is_((robj1, robj2), hop): + if hop.s_result.is_constant(): + return inputconst(Bool, hop.s_result.const) + s_pbc = annmodel.unionof(robj1.s_pbc, robj2.s_pbc) + r_pbc = hop.rtyper.getrepr(s_pbc) + v1, v2 = hop.inputargs(r_pbc, r_pbc) + assert v1.concretetype == v2.concretetype + if v1.concretetype == Char: + return hop.genop('char_eq', [v1, v2], resulttype=Bool) + elif isinstance(v1.concretetype, Ptr): + return hop.genop('ptr_eq', [v1, v2], resulttype=Bool) + else: + raise TyperError("unknown type %r" % (v1.concretetype,)) + def conversion_table(r_from, r_to): if r_to in r_from._conversion_tables: diff --git a/rpython/rtyper/rweakref.py b/rpython/rtyper/rweakref.py index 994b482f1e..fce1d2164e 100644 --- a/rpython/rtyper/rweakref.py +++ b/rpython/rtyper/rweakref.py @@ -11,25 +11,22 @@ from rpython.rtyper.lltypesystem import lltype, llmemory class __extend__(annmodel.SomeWeakRef): def rtyper_makerepr(self, rtyper): - return WeakRefRepr(rtyper) + if rtyper.getconfig().translation.rweakref: + return WeakRefRepr(rtyper) + else: + return EmulatedWeakRefRepr(rtyper) def rtyper_makekey(self): return self.__class__, -class WeakRefRepr(Repr): - lowleveltype = llmemory.WeakRefPtr - dead_wref = llmemory.dead_wref - null_wref = lltype.nullptr(llmemory.WeakRef) +class BaseWeakRefRepr(Repr): def __init__(self, rtyper): self.rtyper = rtyper - if not rtyper.getconfig().translation.rweakref: - raise TyperError("RPython-level weakrefs are not supported by " - "this backend or GC policy") def convert_const(self, value): if value is None: - return self.null_wref + return lltype.nullptr(self.lowleveltype.TO) assert isinstance(value, weakref.ReferenceType) instance = value() @@ -39,8 +36,7 @@ class WeakRefRepr(Repr): else: repr = self.rtyper.bindingrepr(Constant(instance)) llinstance = repr.convert_const(instance) - return self._weakref_create(llinstance) - + return self.do_weakref_create(llinstance) def rtype_simple_call(self, hop): v_wref, = 
hop.inputargs(self) @@ -48,8 +44,53 @@ class WeakRefRepr(Repr): if hop.r_result.lowleveltype is lltype.Void: # known-to-be-dead weakref return hop.inputconst(lltype.Void, None) else: - return hop.genop('weakref_deref', [v_wref], - resulttype=hop.r_result) + assert v_wref.concretetype == self.lowleveltype + return self._weakref_deref(hop, v_wref) + + +class WeakRefRepr(BaseWeakRefRepr): + lowleveltype = llmemory.WeakRefPtr + dead_wref = llmemory.dead_wref - def _weakref_create(self, llinstance): + def do_weakref_create(self, llinstance): return llmemory.weakref_create(llinstance) + + def _weakref_create(self, hop, v_inst): + return hop.genop('weakref_create', [v_inst], + resulttype=llmemory.WeakRefPtr) + + def _weakref_deref(self, hop, v_wref): + return hop.genop('weakref_deref', [v_wref], + resulttype=hop.r_result) + + +class EmulatedWeakRefRepr(BaseWeakRefRepr): + """For the case rweakref=False, we emulate RPython-level weakrefs + with regular strong references (but not low-level weakrefs). + """ + lowleveltype = lltype.Ptr(lltype.GcStruct('EmulatedWeakRef', + ('ref', llmemory.GCREF))) + dead_wref = lltype.malloc(lowleveltype.TO, immortal=True, zero=True) + + def do_weakref_create(self, llinstance): + p = lltype.malloc(self.lowleveltype.TO, immortal=True) + p.ref = lltype.cast_opaque_ptr(llmemory.GCREF, llinstance) + return p + + def _weakref_create(self, hop, v_inst): + c_type = hop.inputconst(lltype.Void, self.lowleveltype.TO) + c_flags = hop.inputconst(lltype.Void, {'flavor': 'gc'}) + v_ptr = hop.genop('malloc', [c_type, c_flags], + resulttype=self.lowleveltype) + v_gcref = hop.genop('cast_opaque_ptr', [v_inst], + resulttype=llmemory.GCREF) + c_ref = hop.inputconst(lltype.Void, 'ref') + hop.genop('setfield', [v_ptr, c_ref, v_gcref]) + return v_ptr + + def _weakref_deref(self, hop, v_wref): + c_ref = hop.inputconst(lltype.Void, 'ref') + v_gcref = hop.genop('getfield', [v_wref, c_ref], + resulttype=llmemory.GCREF) + return hop.genop('cast_opaque_ptr', [v_gcref], + resulttype=hop.r_result) diff --git a/rpython/rtyper/test/test_exception.py b/rpython/rtyper/test/test_exception.py index 05d6ffb93d..2667bf39e9 100644 --- a/rpython/rtyper/test/test_exception.py +++ b/rpython/rtyper/test/test_exception.py @@ -48,7 +48,7 @@ class TestException(BaseRtypingTest): def f(n): try: g(n) - except IOError, e: + except IOError as e: assert e.errno == 0 assert e.strerror == "test" assert e.filename is None @@ -56,7 +56,7 @@ class TestException(BaseRtypingTest): assert False try: h(n) - except OSError, e: + except OSError as e: assert e.errno == 42 assert e.strerror == "?" 
assert e.filename is None @@ -92,7 +92,7 @@ class TestException(BaseRtypingTest): def f(n): try: assert n < 10 - except MyError, operr: + except MyError as operr: h(operr) res = self.interpret(f, [7]) assert res is None @@ -108,7 +108,7 @@ class TestException(BaseRtypingTest): raise OperationError(next_instr) try: raise BytecodeCorruption() - except OperationError, operr: + except OperationError as operr: next_instr -= operr.a py.test.raises(LLException, self.interpret, f, [10]) @@ -124,7 +124,7 @@ class TestException(BaseRtypingTest): raise OperationError(next_instr) try: raise bcerr - except OperationError, operr: + except OperationError as operr: next_instr -= operr.a py.test.raises(LLException, self.interpret, f, [10]) diff --git a/rpython/rtyper/test/test_generator.py b/rpython/rtyper/test/test_generator.py index 44ffe2f101..2fd16117aa 100644 --- a/rpython/rtyper/test/test_generator.py +++ b/rpython/rtyper/test/test_generator.py @@ -84,7 +84,7 @@ class TestGenerator(BaseRtypingTest): def g(c): try: h(c) - except Exception, e: + except Exception as e: if isinstance(e, ValueError): raise raise StopIteration diff --git a/rpython/rtyper/test/test_rlist.py b/rpython/rtyper/test/test_rlist.py index 2c33b7bc1f..d894636a55 100644 --- a/rpython/rtyper/test/test_rlist.py +++ b/rpython/rtyper/test/test_rlist.py @@ -845,7 +845,7 @@ class TestRlist(BaseRtypingTest): res2 = fn(i) res1 = self.interpret(fn, [i]) assert res1 == res2 - except Exception, e: + except Exception as e: self.interpret_raises(e.__class__, fn, [i]) def fn(i): @@ -863,7 +863,7 @@ class TestRlist(BaseRtypingTest): res2 = fn(i) res1 = self.interpret(fn, [i]) assert res1 == res2 - except Exception, e: + except Exception as e: self.interpret_raises(e.__class__, fn, [i]) diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py index 0e95042cd9..5b318c19a2 100644 --- a/rpython/rtyper/test/test_rpbc.py +++ b/rpython/rtyper/test/test_rpbc.py @@ -1497,6 +1497,47 @@ class TestRPBC(BaseRtypingTest): res = self.interpret(f, [2]) assert res == False + def test_is_among_functions_2(self): + def g1(): pass + def g2(): pass + def f(n): + if n > 5: + g = g2 + else: + g = g1 + g() + return g is g2 + res = self.interpret(f, [2]) + assert res == False + res = self.interpret(f, [8]) + assert res == True + + def test_is_among_functions_3(self): + def g0(): pass + def g1(): pass + def g2(): pass + def g3(): pass + def g4(): pass + def g5(): pass + def g6(): pass + def g7(): pass + glist = [g0, g1, g2, g3, g4, g5, g6, g7] + def f(n): + if n > 5: + g = g2 + else: + g = g1 + h = glist[n] + g() + h() + return g is h + res = self.interpret(f, [2]) + assert res == False + res = self.interpret(f, [1]) + assert res == True + res = self.interpret(f, [6]) + assert res == False + def test_shrink_pbc_set(self): def g1(): return 10 diff --git a/rpython/rtyper/test/test_rweakref.py b/rpython/rtyper/test/test_rweakref.py index df424d3cd3..82fa43aec1 100644 --- a/rpython/rtyper/test/test_rweakref.py +++ b/rpython/rtyper/test/test_rweakref.py @@ -138,3 +138,22 @@ class TestRweakref(BaseRtypingTest): res = self.interpret(f, []) assert res == lltype.nullptr(S) + + +class TestRWeakrefDisabled(BaseRtypingTest): + def test_no_real_weakref(self): + class A: + pass + a1 = A() + mylist = [weakref.ref(a1), None] + def g(): + a2 = A() + return weakref.ref(a2) + def fn(i): + w = g() + rgc.collect() + assert w() is not None + return mylist[i] is None + + assert self.interpret(fn, [0], rweakref=False) is False + assert self.interpret(fn, [1], 
rweakref=False) is True diff --git a/rpython/rtyper/tool/genrffi.py b/rpython/rtyper/tool/genrffi.py index e7b43cc10e..63efd7c4f1 100755 --- a/rpython/rtyper/tool/genrffi.py +++ b/rpython/rtyper/tool/genrffi.py @@ -108,9 +108,9 @@ class RffiBuilder(object): if isinstance(value, ctypes._CFuncPtr): try: self.proc_func(value) - except NotImplementedError, e: + except NotImplementedError as e: print "genrffi: skipped:", key, value, e - except TypeError, e: + except TypeError as e: print "genrffi: skipped:", key, value, e diff --git a/rpython/rtyper/tool/rffi_platform.py b/rpython/rtyper/tool/rffi_platform.py index eade70e2bd..38090a51a7 100755 --- a/rpython/rtyper/tool/rffi_platform.py +++ b/rpython/rtyper/tool/rffi_platform.py @@ -836,7 +836,7 @@ def configure_external_library(name, eci, configurations, # verify that this eci can be compiled try: verify_eci(eci_lib) - except CompilationError, e: + except CompilationError as e: last_error = e else: _cache[key] = eci_lib diff --git a/rpython/tool/algo/bitstring.py b/rpython/tool/algo/bitstring.py new file mode 100644 index 0000000000..95afe1b061 --- /dev/null +++ b/rpython/tool/algo/bitstring.py @@ -0,0 +1,23 @@ + + +def make_bitstring(lst): + "NOT_RPYTHON" + if not lst: + return '' + num_bits = max(lst) + 1 + num_bytes = (num_bits + 7) // 8 + entries = [0] * num_bytes + for x in lst: + assert x >= 0 + entries[x >> 3] |= 1 << (x & 7) + return ''.join(map(chr, entries)) + +def bitcheck(bitstring, n): + assert n >= 0 + byte_number = n >> 3 + if byte_number >= len(bitstring): + return False + return (ord(bitstring[byte_number]) & (1 << (n & 7))) != 0 + +def num_bits(bitstring): + return len(bitstring) << 3 diff --git a/rpython/tool/algo/test/test_bitstring.py b/rpython/tool/algo/test/test_bitstring.py new file mode 100644 index 0000000000..8fd7846700 --- /dev/null +++ b/rpython/tool/algo/test/test_bitstring.py @@ -0,0 +1,25 @@ +from rpython.tool.algo.bitstring import * +from hypothesis import given, strategies + +def test_make(): + assert make_bitstring([]) == '' + assert make_bitstring([0]) == '\x01' + assert make_bitstring([7]) == '\x80' + assert make_bitstring([8]) == '\x00\x01' + assert make_bitstring([2, 4, 20]) == '\x14\x00\x10' + +def test_bitcheck(): + assert bitcheck('\x01', 0) is True + assert bitcheck('\x01', 1) is False + assert bitcheck('\x01', 10) is False + assert [n for n in range(32) if bitcheck('\x14\x00\x10', n)] == [2, 4, 20] + +@given(strategies.lists(strategies.integers(min_value=0, max_value=299))) +def test_random(lst): + bitstring = make_bitstring(lst) + assert set([n for n in range(300) if bitcheck(bitstring, n)]) == set(lst) + +def test_num_bits(): + assert num_bits('') == 0 + assert num_bits('a') == 8 + assert num_bits('bcd') == 24 diff --git a/rpython/tool/disassembler.py b/rpython/tool/disassembler.py index f5d3e2d4ba..21e97f1fc8 100644 --- a/rpython/tool/disassembler.py +++ b/rpython/tool/disassembler.py @@ -87,7 +87,7 @@ def dis(x=None): print("Disassembly of %s:" % name) try: dis(x1) - except TypeError, msg: + except TypeError as msg: print("Sorry:", msg) print() elif hasattr(x, 'co_code'): diff --git a/rpython/tool/error.py b/rpython/tool/error.py index 5be0ee8cd4..ad34d66738 100644 --- a/rpython/tool/error.py +++ b/rpython/tool/error.py @@ -103,7 +103,7 @@ def format_simple_call(annotator, oper, msg): msg.append("Occurred processing the following simple_call:") try: descs = annotator.binding(oper.args[0]).descriptions - except (KeyError, AttributeError), e: + except (KeyError, AttributeError) as e: msg.append(" 
diff --git a/rpython/tool/disassembler.py b/rpython/tool/disassembler.py
index f5d3e2d4ba..21e97f1fc8 100644
--- a/rpython/tool/disassembler.py
+++ b/rpython/tool/disassembler.py
@@ -87,7 +87,7 @@ def dis(x=None):
             print("Disassembly of %s:" % name)
             try:
                 dis(x1)
-            except TypeError, msg:
+            except TypeError as msg:
                 print("Sorry:", msg)
             print()
     elif hasattr(x, 'co_code'):
diff --git a/rpython/tool/error.py b/rpython/tool/error.py
index 5be0ee8cd4..ad34d66738 100644
--- a/rpython/tool/error.py
+++ b/rpython/tool/error.py
@@ -103,7 +103,7 @@ def format_simple_call(annotator, oper, msg):
     msg.append("Occurred processing the following simple_call:")
    try:
        descs = annotator.binding(oper.args[0]).descriptions
-    except (KeyError, AttributeError), e:
+    except (KeyError, AttributeError) as e:
        msg.append("      (%s getting at the binding!)" % (
            e.__class__.__name__,))
        return
diff --git a/rpython/tool/frozenlist.py b/rpython/tool/frozenlist.py
index 5ce7c88abf..a43d7e05c9 100644
--- a/rpython/tool/frozenlist.py
+++ b/rpython/tool/frozenlist.py
@@ -1,7 +1,7 @@
 from rpython.tool.sourcetools import func_with_new_name
 
 def forbid(*args):
-    raise TypeError, "cannot mutate a frozenlist"
+    raise TypeError("cannot mutate a frozenlist")
 
 class frozenlist(list):
     __setitem__ = func_with_new_name(forbid, '__setitem__')
diff --git a/rpython/tool/leakfinder.py b/rpython/tool/leakfinder.py
index 879961291e..0d1b719844 100644
--- a/rpython/tool/leakfinder.py
+++ b/rpython/tool/leakfinder.py
@@ -37,13 +37,13 @@ def start_tracking_allocations():
     ALLOCATED.clear()
     return result
 
-def stop_tracking_allocations(check, prev=None):
+def stop_tracking_allocations(check, prev=None, do_collection=gc.collect):
     global TRACK_ALLOCATIONS
     assert TRACK_ALLOCATIONS
     for i in range(5):
         if not ALLOCATED:
             break
-        gc.collect()
+        do_collection()
     result = ALLOCATED.copy()
     ALLOCATED.clear()
     if prev is None:
diff --git a/rpython/tool/terminal.py b/rpython/tool/terminal.py
index 53e99739c0..317f4a8401 100644
--- a/rpython/tool/terminal.py
+++ b/rpython/tool/terminal.py
@@ -77,7 +77,7 @@ def render(text):
 try:
     import curses
     setup()
-except Exception, e:
+except Exception as e:
     # There is a failure; set all attributes to default
     print 'Warning: %s' % e
     default()
diff --git a/rpython/tool/test/test_error.py b/rpython/tool/test/test_error.py
index aee16ad604..e72c495108 100644
--- a/rpython/tool/test/test_error.py
+++ b/rpython/tool/test/test_error.py
@@ -55,6 +55,6 @@ def test_someobject_from_call():
 
     try:
         compile_function(fn, [int])
-    except UnionError, e:
+    except UnionError as e:
         assert 'function one' in str(e)
         assert 'function two' in str(e)
diff --git a/rpython/translator/backendopt/all.py b/rpython/translator/backendopt/all.py
index b7f436a6d8..81ce5efdf5 100644
--- a/rpython/translator/backendopt/all.py
+++ b/rpython/translator/backendopt/all.py
@@ -20,7 +20,7 @@ def get_function(dottedname):
     name = parts[-1]
     try:
         mod = __import__(module, {}, {}, ['__doc__'])
-    except ImportError, e:
+    except ImportError as e:
         raise Exception("Import error loading %s: %s" % (dottedname, e))
 
     try:
diff --git a/rpython/translator/backendopt/finalizer.py b/rpython/translator/backendopt/finalizer.py
index 25450347de..0a7a5c5361 100644
--- a/rpython/translator/backendopt/finalizer.py
+++ b/rpython/translator/backendopt/finalizer.py
@@ -1,30 +1,43 @@
-
 from rpython.translator.backendopt import graphanalyze
 from rpython.rtyper.lltypesystem import lltype
+from rpython.tool.ansi_print import AnsiLogger
+
+log = AnsiLogger("finalizer")
+
 
 class FinalizerError(Exception):
-    """ __del__ marked as lightweight finalizer, but the analyzer did
-    not agree
-    """
+    """__del__() is used for lightweight RPython destructors,
+    but the FinalizerAnalyzer found that it is not lightweight.
 
-class FinalizerAnalyzer(graphanalyze.BoolGraphAnalyzer):
-    """ Analyzer that determines whether a finalizer is lightweight enough
-    so it can be called without all the complicated logic in the garbage
-    collector. The set of operations here is restrictive for a good reason
+    The set of allowed operations is restrictive for a good reason
     - it's better to be safe.  Specifically disallowed operations:
 
     * anything that escapes self
 
     * anything that can allocate
 
     """
+
+class FinalizerAnalyzer(graphanalyze.BoolGraphAnalyzer):
+    """ Analyzer that determines whether a finalizer is lightweight enough
+    so it can be called without all the complicated logic in the garbage
+    collector.
+    """
     ok_operations = ['ptr_nonzero', 'ptr_eq', 'ptr_ne', 'free', 'same_as',
                      'direct_ptradd', 'force_cast', 'track_alloc_stop',
-                     'raw_free', 'adr_eq', 'adr_ne']
+                     'raw_free', 'adr_eq', 'adr_ne',
+                     'debug_print']
 
     def analyze_light_finalizer(self, graph):
-        result = self.analyze_direct_call(graph)
-        if (result is self.top_result() and
-            getattr(graph.func, '_must_be_light_finalizer_', False)):
-            raise FinalizerError(FinalizerError.__doc__, graph)
+        if getattr(graph.func, '_must_be_light_finalizer_', False):
+            self._must_be_light = graph
+            result = self.analyze_direct_call(graph)
+            del self._must_be_light
+            if result is self.top_result():
+                msg = '%s\nIn %r' % (FinalizerError.__doc__, graph)
+                raise FinalizerError(msg)
+        else:
+            result = self.analyze_direct_call(graph)
+            if result is self.top_result():
+                log.red('old-style non-light finalizer: %r' % (graph,))
         return result
 
     def analyze_simple_operation(self, op, graphinfo):
@@ -43,4 +56,9 @@ class FinalizerAnalyzer(graphanalyze.BoolGraphAnalyzer):
             if not isinstance(TP, lltype.Ptr) or TP.TO._gckind == 'raw':
                 # primitive type
                 return self.bottom_result()
-        return self.top_result()
+
+        if not hasattr(self, '_must_be_light'):
+            return self.top_result()
+        msg = '%s\nFound this forbidden operation:\n%r\nin %r\nfrom %r' % (
+            FinalizerError.__doc__, op, graphinfo, self._must_be_light)
+        raise FinalizerError(msg)
diff --git a/rpython/translator/backendopt/inline.py b/rpython/translator/backendopt/inline.py
index e7a97a876e..c90fd6b7ac 100644
--- a/rpython/translator/backendopt/inline.py
+++ b/rpython/translator/backendopt/inline.py
@@ -679,7 +679,7 @@ def auto_inlining(translator, threshold=None,
                                            call_count_pred, cleanup=False)
                 to_cleanup[parentgraph] = True
             res = bool(subcount)
-        except CannotInline, e:
+        except CannotInline as e:
            try_again[graph] = str(e)
            res = CannotInline
         if res is True:
diff --git a/rpython/translator/backendopt/mallocv.py b/rpython/translator/backendopt/mallocv.py
index dda732d700..1483dc5654 100644
--- a/rpython/translator/backendopt/mallocv.py
+++ b/rpython/translator/backendopt/mallocv.py
@@ -337,10 +337,10 @@ class MallocVirtualizer(object):
             graphbuilder.start_from_a_malloc(graph, block, op.result)
             try:
                 graphbuilder.propagate_specializations()
-            except CannotVirtualize, e:
+            except CannotVirtualize as e:
                 self.logresult(op, 'failed', e)
                 return False
-            except ForcedInline, e:
+            except ForcedInline as e:
                 self.logresult(op, 'forces inlining', e)
                 self.inline_and_remove[graph] = op
                 self.inline_and_remove_seen[graph, op] = True
@@ -396,11 +396,11 @@ class MallocVirtualizer(object):
             self.specialized_graphs[key] = ('call', specgraph)
             try:
                 graphbuilder.propagate_specializations()
-            except ForcedInline, e:
+            except ForcedInline as e:
                 if self.verbose:
                     log.mallocv('%s inlined: %s' % (graph.name, e))
                 self.specialized_graphs[key] = ('inline', None)
-            except CannotVirtualize, e:
+            except CannotVirtualize as e:
                 if self.verbose:
                     log.mallocv('%s failing: %s' % (graph.name, e))
                 self.specialized_graphs[key] = ('fail', None)
@@ -1036,7 +1036,7 @@ def try_fold_operation(opname, args_v, RESTYPE):
         pass
     except (KeyboardInterrupt, SystemExit):
         raise
-    except Exception, e:
+    except Exception as e:
         pass
         #log.WARNING('constant-folding %s%r:' % (opname, args_v))
         #log.WARNING('  %s: %s' % (e.__class__.__name__, e))
diff --git a/rpython/translator/backendopt/test/test_finalizer.py b/rpython/translator/backendopt/test/test_finalizer.py
index 9737896fa4..120f6a1875 100644
--- a/rpython/translator/backendopt/test/test_finalizer.py
+++ b/rpython/translator/backendopt/test/test_finalizer.py
@@ -35,31 +35,6 @@ class TestFinalizerAnalyzer(object):
         r = self.analyze(f, [])
         assert not r
 
-def test_various_ops():
-    from rpython.flowspace.model import SpaceOperation, Constant
-
-    X = lltype.Ptr(lltype.GcStruct('X'))
-    Z = lltype.Ptr(lltype.Struct('Z'))
-    S = lltype.GcStruct('S', ('x', lltype.Signed),
-                        ('y', X),
-                        ('z', Z))
-    v1 = varoftype(lltype.Bool)
-    v2 = varoftype(lltype.Signed)
-    f = FinalizerAnalyzer(None)
-    r = f.analyze(SpaceOperation('cast_int_to_bool', [v2],
-                                 v1))
-    assert not r
-    v1 = varoftype(lltype.Ptr(S))
-    v2 = varoftype(lltype.Signed)
-    v3 = varoftype(X)
-    v4 = varoftype(Z)
-    assert not f.analyze(SpaceOperation('bare_setfield', [v1, Constant('x'),
-                                                          v2], None))
-    assert f.analyze(SpaceOperation('bare_setfield', [v1, Constant('y'),
-                                                      v3], None))
-    assert not f.analyze(SpaceOperation('bare_setfield', [v1, Constant('z'),
-                                                          v4], None))
-
     def test_malloc(self):
         S = lltype.GcStruct('S')
@@ -100,6 +75,22 @@
             lltype.free(p, flavor='raw')
 
         r = self.analyze(g, [], f, backendopt=True)
+        assert r
+
+    def test_c_call_without_release_gil(self):
+        C = rffi.CArray(lltype.Signed)
+        c = rffi.llexternal('x', [lltype.Ptr(C)], lltype.Signed,
+                            releasegil=False)
+
+        def g():
+            p = lltype.malloc(C, 3, flavor='raw')
+            f(p)
+
+        def f(p):
+            c(rffi.ptradd(p, 0))
+            lltype.free(p, flavor='raw')
+
+        r = self.analyze(g, [], f, backendopt=True)
         assert not r
 
     def test_chain(self):
@@ -131,3 +122,30 @@
                 pass
         self.analyze(g, [])    # did not explode
         py.test.raises(FinalizerError, self.analyze, f, [])
+
+
+def test_various_ops():
+    from rpython.flowspace.model import SpaceOperation, Constant
+
+    X = lltype.Ptr(lltype.GcStruct('X'))
+    Z = lltype.Ptr(lltype.Struct('Z'))
+    S = lltype.GcStruct('S', ('x', lltype.Signed),
+                        ('y', X),
+                        ('z', Z))
+    v1 = varoftype(lltype.Bool)
+    v2 = varoftype(lltype.Signed)
+    f = FinalizerAnalyzer(None)
+    r = f.analyze(SpaceOperation('cast_int_to_bool', [v2],
+                                 v1))
+    assert not r
+    v1 = varoftype(lltype.Ptr(S))
+    v2 = varoftype(lltype.Signed)
+    v3 = varoftype(X)
+    v4 = varoftype(Z)
+    assert not f.analyze(SpaceOperation('bare_setfield', [v1, Constant('x'),
+                                                          v2], None))
+    assert f.analyze(SpaceOperation('bare_setfield', [v1, Constant('y'),
+                                                      v3], None))
+    assert not f.analyze(SpaceOperation('bare_setfield', [v1, Constant('z'),
+                                                          v4], None))
+
diff --git a/rpython/translator/backendopt/test/test_mallocv.py b/rpython/translator/backendopt/test/test_mallocv.py
index 1ef8d08bc1..0e9f18f8af 100644
--- a/rpython/translator/backendopt/test/test_mallocv.py
+++ b/rpython/translator/backendopt/test/test_mallocv.py
@@ -205,7 +205,7 @@ class TestMallocRemoval(object):
             a.n = 10
             try:
                 g(n)    # this call should not be inlined
-            except E, e:
+            except E as e:
                 a.n = e.n
             return a.n
         self.check(f, [int], [15], 10, expected_calls=1)
@@ -222,7 +222,7 @@ class TestMallocRemoval(object):
             e1 = E(n)
             try:
                 raise e1
-            except E, e:
+            except E as e:
                 a.n = e.n
             return a.n
         self.check(f, [int], [15], 15)
@@ -308,7 +308,7 @@ class TestMallocRemoval(object):
             a.n = n
             try:
                 g(a)    # this call should be inlined
-            except E, e:
+            except E as e:
                 a.n = e.n
             return a.n
         self.check(f, [int], [15], 14, expected_calls=0)
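A note on the FinalizerAnalyzer changes above: a __del__ decorated with @rgc.must_be_light_finalizer may only use the operations in ok_operations (pointer comparisons, freeing raw memory, debug_print and the like); anything that allocates or lets self escape now raises FinalizerError naming the forbidden operation, instead of silently falling back to a heavy finalizer. A minimal sketch of the distinction, using hypothetical classes (raw_buf is assumed to be a raw-allocated buffer):

    from rpython.rlib import rgc
    from rpython.rtyper.lltypesystem import lltype

    class RawBufferHolder(object):       # hypothetical example class
        @rgc.must_be_light_finalizer
        def __del__(self):
            # reading a raw pointer field and freeing it stays "light"
            if self.raw_buf:
                lltype.free(self.raw_buf, flavor='raw')

    class ChattyHolder(object):          # hypothetical example class
        @rgc.must_be_light_finalizer
        def __del__(self):
            # allocating a GC object and storing it into self is rejected
            # by the analyzer with FinalizerError
            self.leftovers = []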
diff --git a/rpython/translator/backendopt/test/test_writeanalyze.py b/rpython/translator/backendopt/test/test_writeanalyze.py
index d56a8e646d..40b97d3240 100644
--- a/rpython/translator/backendopt/test/test_writeanalyze.py
+++ b/rpython/translator/backendopt/test/test_writeanalyze.py
@@ -1,3 +1,4 @@
+import py
 from rpython.rtyper.lltypesystem import lltype
 from rpython.translator.translator import TranslationContext, graphof
 from rpython.translator.backendopt.writeanalyze import WriteAnalyzer, top_set
@@ -313,6 +314,39 @@ class TestLLtypeReadWriteAnalyze(BaseTest):
         assert name2.endswith("x")
         assert T1 == T2
 
+    def test_cutoff(self):
+        py.test.skip("cutoff: disabled")
+        from rpython.rlib.unroll import unrolling_iterable
+        cutoff = 20
+        attrs = unrolling_iterable(["s%s" % i for i in range(cutoff + 5)])
+
+        class A(object):
+            def __init__(self, y):
+                for attr in attrs:
+                    setattr(self, attr, y)
+            def f(self):
+                self.x = 1
+                res = 0
+                for attr in attrs:
+                    res += getattr(self, attr)
+                return res
+
+        def h(flag):
+            obj = A(flag)
+            return obj.f()
+
+        t, wa = self.translate(h, [int])
+        wa.cutoff = cutoff
+        hgraph = graphof(t, h)
+        op_call_f = hgraph.startblock.operations[-1]
+
+        # check that we fished the expected ops
+        assert op_call_f.opname == "direct_call"
+        assert op_call_f.args[0].value._obj._name == 'A.f'
+
+        result = wa.analyze(op_call_f)
+        assert result is top_set
+
     def test_contains(self):
         def g(x, y, z):
             l = [x]
diff --git a/rpython/translator/backendopt/writeanalyze.py b/rpython/translator/backendopt/writeanalyze.py
index 5788f10da5..1d35378859 100644
--- a/rpython/translator/backendopt/writeanalyze.py
+++ b/rpython/translator/backendopt/writeanalyze.py
@@ -4,9 +4,15 @@ from rpython.translator.backendopt import graphanalyze
 
 top_set = object()
 empty_set = frozenset()
-CUTOFF = 1000
+# CUTOFF is disabled, as it gave a strangely not-working-any-more effect
+# if the size of the result grows past that bound.  The main user was
+# optimizeopt/heap.py (force_from_effectinfo), which has been rewritten
+# to be happy with any size now.
+#CUTOFF = 3000
 
 class WriteAnalyzer(graphanalyze.GraphAnalyzer):
+    #cutoff = CUTOFF
+
     def bottom_result(self):
         return empty_set
 
@@ -22,9 +28,9 @@ class WriteAnalyzer(graphanalyze.GraphAnalyzer):
     def add_to_result(self, result, other):
         if other is top_set:
             return top_set
-        if len(other) + len(result) > CUTOFF:
-            return top_set
         result.update(other)
+        #if len(result) > self.cutoff:
+        #    return top_set
         return result
 
     def finalize_builder(self, result):
diff --git a/rpython/translator/c/src/mem.h b/rpython/translator/c/src/mem.h
index 42993fd8ea..e5619ae52e 100644
--- a/rpython/translator/c/src/mem.h
+++ b/rpython/translator/c/src/mem.h
@@ -109,6 +109,9 @@ RPY_EXTERN void boehm_gc_finalizer_notifier(void);
 #define OP_GC__ENABLE_FINALIZERS(r)  (boehm_gc_finalizer_lock--,   \
                                       boehm_gc_finalizer_notifier())
 
+#define OP_GC_FQ_REGISTER(tag, obj, r)   /* ignored so far */
+#define OP_GC_FQ_NEXT_DEAD(tag, r)       (r = NULL)
+
 #endif /* PYPY_USING_BOEHM_GC */
 
 
@@ -121,6 +124,8 @@ RPY_EXTERN void boehm_gc_finalizer_notifier(void);
 #define GC_REGISTER_FINALIZER(a, b, c, d, e)  /* nothing */
 #define GC_gcollect()                         /* nothing */
 #define GC_set_max_heap_size(a)               /* nothing */
+#define OP_GC_FQ_REGISTER(tag, obj, r)        /* nothing */
+#define OP_GC_FQ_NEXT_DEAD(tag, r)            (r = NULL)
 #endif
 
 /************************************************************/
diff --git a/rpython/translator/c/src/thread_nt.c b/rpython/translator/c/src/thread_nt.c
index 418289ceb5..d97fd324bc 100644
--- a/rpython/translator/c/src/thread_nt.c
+++ b/rpython/translator/c/src/thread_nt.c
@@ -18,7 +18,8 @@ typedef struct RPyOpaque_ThreadLock NRMUTEX, *PNRMUTEX;
 
 typedef struct {
-	void (*func)(void);
+	void (*func)(void *);
+	void *arg;
 	long id;
 	HANDLE done;
 } callobj;
@@ -30,20 +31,29 @@ bootstrap(void *call)
 {
 	callobj *obj = (callobj*)call;
 	/* copy callobj since other thread might free it before we're done */
-	void (*func)(void) = obj->func;
+	void (*func)(void *) = obj->func;
+	void *arg = obj->arg;
 
 	obj->id = GetCurrentThreadId();
 	ReleaseSemaphore(obj->done, 1, NULL);
-	func();
+	func(arg);
 }
 
 long RPyThreadStart(void (*func)(void))
 {
+	/* a kind-of-invalid cast, but the 'func' passed here doesn't expect
+	   any argument, so it's unlikely to cause problems */
+	return RPyThreadStartEx((void(*)(void *))func, NULL);
+}
+
+long RPyThreadStartEx(void (*func)(void *), void *arg)
+{
 	unsigned long rv;
 	callobj obj;
 
 	obj.id = -1;	/* guilty until proved innocent */
 	obj.func = func;
+	obj.arg = arg;
 	obj.done = CreateSemaphore(NULL, 0, 1, NULL);
 	if (obj.done == NULL)
 		return -1;
diff --git a/rpython/translator/c/src/thread_nt.h b/rpython/translator/c/src/thread_nt.h
index fa0ed6c9ce..6d6f44a4a7 100644
--- a/rpython/translator/c/src/thread_nt.h
+++ b/rpython/translator/c/src/thread_nt.h
@@ -15,6 +15,8 @@ typedef struct RPyOpaque_ThreadLock {
 RPY_EXTERN
 long RPyThreadStart(void (*func)(void));
 RPY_EXTERN
+long RPyThreadStartEx(void (*func)(void *), void *arg);
+RPY_EXTERN
 int RPyThreadLockInit(struct RPyOpaque_ThreadLock *lock);
 RPY_EXTERN
 void RPyOpaqueDealloc_ThreadLock(struct RPyOpaque_ThreadLock *lock);
diff --git a/rpython/translator/c/src/thread_pthread.c b/rpython/translator/c/src/thread_pthread.c
index 41f8d5d586..241dd122c6 100644
--- a/rpython/translator/c/src/thread_pthread.c
+++ b/rpython/translator/c/src/thread_pthread.c
@@ -58,13 +58,14 @@ static long _pypythread_stacksize = 0;
 
-static void *bootstrap_pthread(void *func)
+long RPyThreadStart(void (*func)(void))
 {
-	((void(*)(void))func)();
-	return NULL;
+	/* a kind-of-invalid cast, but the 'func' passed here doesn't expect
+	   any argument, so it's unlikely to cause problems */
+	return RPyThreadStartEx((void(*)(void *))func, NULL);
 }
 
-long RPyThreadStart(void (*func)(void))
+long RPyThreadStartEx(void (*func)(void *), void *arg)
 {
 	pthread_t th;
 	int status;
@@ -94,8 +95,12 @@ long RPyThreadStart(void (*func)(void))
 #else
 				 (pthread_attr_t*)NULL,
 #endif
-				 bootstrap_pthread,
-				 (void *)func
+				 /* the next line does an invalid cast: pthread_create() will see a
+				    function that returns random garbage.  The code is the same as
+				    CPython: this random garbage will be stored for pthread_join()
+				    to return, but in this case pthread_join() is never called. */
+				 (void* (*)(void *))func,
+				 (void *)arg
 				 );
 
 #if defined(THREAD_STACK_SIZE) || defined(PTHREAD_SYSTEM_SCHED_SUPPORTED)
diff --git a/rpython/translator/c/src/thread_pthread.h b/rpython/translator/c/src/thread_pthread.h
index d18aff4689..08048117e0 100644
--- a/rpython/translator/c/src/thread_pthread.h
+++ b/rpython/translator/c/src/thread_pthread.h
@@ -62,6 +62,8 @@ struct RPyOpaque_ThreadLock {
 RPY_EXTERN
 long RPyThreadStart(void (*func)(void));
 RPY_EXTERN
+long RPyThreadStartEx(void (*func)(void *), void *arg);
+RPY_EXTERN
 int RPyThreadLockInit(struct RPyOpaque_ThreadLock *lock);
 RPY_EXTERN
 void RPyOpaqueDealloc_ThreadLock(struct RPyOpaque_ThreadLock *lock);
diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h
index 63c200adfd..167bfb67b7 100644
--- a/rpython/translator/c/src/threadlocal.h
+++ b/rpython/translator/c/src/threadlocal.h
@@ -53,6 +53,13 @@ RPY_EXTERN __thread struct pypy_threadlocal_s pypy_threadlocal;
         r = _RPython_ThreadLocals_Build();              \
     } while (0)
 
+#define _OP_THREADLOCALREF_ADDR_SIGHANDLER(r)           \
+    do {                                                \
+        r = (char *)&pypy_threadlocal;                  \
+        if (pypy_threadlocal.ready != 42)               \
+            r = NULL;                                   \
+    } while (0)
+
 #define RPY_THREADLOCALREF_ENSURE()                     \
     if (pypy_threadlocal.ready != 42)                   \
         (void)_RPython_ThreadLocals_Build();
@@ -87,6 +94,11 @@ typedef DWORD pthread_key_t;
         r = _RPython_ThreadLocals_Build();              \
     } while (0)
 
+#define _OP_THREADLOCALREF_ADDR_SIGHANDLER(r)           \
+    do {                                                \
+        r = (char *)_RPy_ThreadLocals_Get();            \
+    } while (0)
+
 #define RPY_THREADLOCALREF_ENSURE()                     \
     if (!_RPy_ThreadLocals_Get())                       \
         (void)_RPython_ThreadLocals_Build();
diff --git a/rpython/translator/c/test/test_boehm.py b/rpython/translator/c/test/test_boehm.py
index f89cb2827c..256fb16123 100644
--- a/rpython/translator/c/test/test_boehm.py
+++ b/rpython/translator/c/test/test_boehm.py
@@ -2,7 +2,7 @@ import weakref
 
 import py
 
-from rpython.rlib import rgc
+from rpython.rlib import rgc, debug
 from rpython.rlib.objectmodel import (keepalive_until_here, compute_unique_id,
     compute_hash, current_object_addr_as_int)
 from rpython.rtyper.lltypesystem import lltype, llmemory
@@ -392,3 +392,23 @@ class TestUsingBoehm(AbstractGCTestClass):
         assert res[2] != compute_hash(c)     # likely
         assert res[3] == compute_hash(d)
         assert res[4] == compute_hash(("Hi", None, (7.5, 2, d)))
+
+    def test_finalizer_queue_is_at_least_ignored(self):
+        class A(object):
+            pass
+        class FQ(rgc.FinalizerQueue):
+            Class = A
+            def finalizer_trigger(self):
+                debug.debug_print("hello!")    # not called so far
+        fq = FQ()
+        #
+        def fn():
+            fq.register_finalizer(A())
+            rgc.collect()
+            rgc.collect()
+            fq.next_dead()
+            return 42
+
+        f = self.getcompiled(fn)
+        res = f()
+        assert res == 42
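A note on the FinalizerQueue pieces above: with Boehm, the new OP_GC_FQ_REGISTER macro in mem.h ignores the registration and OP_GC_FQ_NEXT_DEAD always yields NULL, so fq.next_dead() returns None and finalizer_trigger() is never invoked (which is what test_finalizer_queue_is_at_least_ignored checks). A sketch of a trigger written to tolerate that, assuming a hypothetical application class App:

    from rpython.rlib import rgc

    class App(object):                   # hypothetical class needing cleanup
        pass

    class AppFinalizerQueue(rgc.FinalizerQueue):
        Class = App
        def finalizer_trigger(self):
            while True:
                obj = self.next_dead()
                if obj is None:          # always the case on Boehm, where the
                    break                # C macro makes next_dead() return NULL
                # ... release whatever 'obj' owns here ...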
diff --git a/rpython/translator/c/test/test_exception.py b/rpython/translator/c/test/test_exception.py
index bd3afcbf2d..024cec8fce 100644
--- a/rpython/translator/c/test/test_exception.py
+++ b/rpython/translator/c/test/test_exception.py
@@ -142,7 +142,7 @@ def test_reraise_exception():
             raise_something(n)
         except A:
             raise    # go through
-        except Exception, e:
+        except Exception as e:
             return 100
         return -1
 
diff --git a/rpython/translator/c/test/test_extfunc.py b/rpython/translator/c/test/test_extfunc.py
index 031b953b5d..ca3d474022 100644
--- a/rpython/translator/c/test/test_extfunc.py
+++ b/rpython/translator/c/test/test_extfunc.py
@@ -183,7 +183,7 @@ def test_os_stat_raises_winerror():
     def call_stat():
         try:
             os.stat("nonexistentdir/nonexistentfile")
-        except WindowsError, e:
+        except WindowsError as e:
             return e.winerror
         return 0
     f = compile(call_stat, [])
@@ -612,7 +612,7 @@ if hasattr(os, 'getlogin'):
 
         try:
             expected = os.getlogin()
-        except OSError, e:
+        except OSError as e:
             py.test.skip("the underlying os.getlogin() failed: %s" % e)
         f1 = compile(does_stuff, [])
         assert f1() == expected
diff --git a/rpython/translator/c/test/test_standalone.py b/rpython/translator/c/test/test_standalone.py
index ced2d7b282..98af08b3f0 100644
--- a/rpython/translator/c/test/test_standalone.py
+++ b/rpython/translator/c/test/test_standalone.py
@@ -1162,7 +1162,7 @@ class TestThread(object):
             print >> sys.stderr, 'Trying with %d KB of stack...' % (test_kb,),
             try:
                 data = cbuilder.cmdexec(str(test_kb * 1024))
-            except Exception, e:
+            except Exception as e:
                 if e.__class__ is not Exception:
                     raise
                 print >> sys.stderr, 'segfault'
diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py
index a5ffdbd523..9bd4241837 100644
--- a/rpython/translator/driver.py
+++ b/rpython/translator/driver.py
@@ -398,7 +398,7 @@ class TranslationDriver(SimpleTaskEngine):
         from rpython.translator.platform import CompilationError
         try:
             configure_boehm(self.translator.platform)
-        except CompilationError, e:
+        except CompilationError as e:
"translate.py --gc=minimark"' raise Exception(str(e) + '\n' + i) @@ -552,16 +552,16 @@ class TranslationDriver(SimpleTaskEngine): self.log.info('usession directory: %s' % (udir,)) return result - @staticmethod - def from_targetspec(targetspec_dic, config=None, args=None, + @classmethod + def from_targetspec(cls, targetspec_dic, config=None, args=None, empty_translator=None, disable=[], default_goal=None): if args is None: args = [] - driver = TranslationDriver(config=config, default_goal=default_goal, - disable=disable) + driver = cls(config=config, default_goal=default_goal, + disable=disable) target = targetspec_dic['target'] spec = target(driver, args) diff --git a/rpython/translator/goal/unixcheckpoint.py b/rpython/translator/goal/unixcheckpoint.py index 7cec559052..3830eb792c 100644 --- a/rpython/translator/goal/unixcheckpoint.py +++ b/rpython/translator/goal/unixcheckpoint.py @@ -17,7 +17,7 @@ def restartable_point_fork(auto=None, extra_msg=None): else: try: line = raw_input().strip().lower() - except (KeyboardInterrupt, EOFError), e: + except (KeyboardInterrupt, EOFError) as e: print '(%s ignored)' % e.__class__.__name__ continue if line in ('run', 'cont'): @@ -28,7 +28,7 @@ def restartable_point_fork(auto=None, extra_msg=None): try: import pdb; pdb.set_trace() dummy_for_pdb = 1 # for pdb to land - except Exception, e: + except Exception as e: print '(%s ignored)' % e.__class__.__name__ continue if line == 'restart-it-all': diff --git a/rpython/translator/platform/posix.py b/rpython/translator/platform/posix.py index bcddbefe85..cafc9b1f0d 100644 --- a/rpython/translator/platform/posix.py +++ b/rpython/translator/platform/posix.py @@ -61,7 +61,7 @@ class BasePosix(Platform): def _pkg_config(self, lib, opt, default, check_result_dir=False): try: ret, out, err = _run_subprocess("pkg-config", [lib, opt]) - except OSError, e: + except OSError as e: err = str(e) ret = 1 if ret: diff --git a/rpython/translator/platform/test/test_platform.py b/rpython/translator/platform/test/test_platform.py index 46093014bc..0cdb3d8a87 100644 --- a/rpython/translator/platform/test/test_platform.py +++ b/rpython/translator/platform/test/test_platform.py @@ -64,7 +64,7 @@ class TestPlatform(object): cfile.write('') try: executable = self.platform.compile([cfile], ExternalCompilationInfo()) - except CompilationError, e: + except CompilationError as e: filename = cfile.dirpath().join(cfile.purebasename + '.errors') assert filename.read('r') == e.err else: diff --git a/rpython/translator/sandbox/sandlib.py b/rpython/translator/sandbox/sandlib.py index d226e1afb7..eed639cc5c 100644 --- a/rpython/translator/sandbox/sandlib.py +++ b/rpython/translator/sandbox/sandlib.py @@ -235,14 +235,14 @@ class SandboxedProc(object): try: fnname = read_message(child_stdout) args = read_message(child_stdout) - except EOFError, e: + except EOFError as e: break if self.log and not self.is_spam(fnname, *args): self.log.call('%s(%s)' % (fnname, ', '.join([shortrepr(x) for x in args]))) try: answer, resulttype = self.handle_message(fnname, *args) - except Exception, e: + except Exception as e: tb = sys.exc_info()[2] write_exception(child_stdin, e, tb) if self.log: @@ -445,7 +445,7 @@ class VirtualizedSandboxedProc(SandboxedProc): def do_ll_os__ll_os_access(self, vpathname, mode): try: node = self.get_node(vpathname) - except OSError, e: + except OSError as e: if e.errno == errno.ENOENT: return False raise diff --git a/rpython/translator/sandbox/test/test_sandbox.py b/rpython/translator/sandbox/test/test_sandbox.py index 
index ed46da595f..a3085a904c 100644
--- a/rpython/translator/sandbox/test/test_sandbox.py
+++ b/rpython/translator/sandbox/test/test_sandbox.py
@@ -159,7 +159,7 @@ def test_oserror():
     def entry_point(argv):
         try:
             os.stat("somewhere")
-        except OSError, e:
+        except OSError as e:
             os.close(e.errno)    # nonsense, just to see outside
         return 0
 
diff --git a/rpython/translator/sandbox/test/test_sandlib.py b/rpython/translator/sandbox/test/test_sandlib.py
index 497d6ecb9e..a1307237a8 100644
--- a/rpython/translator/sandbox/test/test_sandlib.py
+++ b/rpython/translator/sandbox/test/test_sandlib.py
@@ -120,7 +120,7 @@ def test_oserror():
     def entry_point(argv):
         try:
             os.open("/tmp/foobar", os.O_RDONLY, 0777)
-        except OSError, e:
+        except OSError as e:
             os.close(e.errno)    # nonsense, just to see outside
         return 0
     exe = compile(entry_point)
@@ -155,7 +155,7 @@ def test_too_many_opens():
                 txt = os.read(fd, 100)
                 if txt != "Hello, world!\n":
                     print "Wrong content: %s" % txt
-        except OSError, e:
+        except OSError as e:
             # We expect to get EMFILE, for opening too many files.
             if e.errno != errno.EMFILE:
                 print "OSError: %s!" % (e.errno,)
@@ -170,7 +170,7 @@ def test_too_many_opens():
             for i in range(500):
                 fd = os.open('/this.pyc', os.O_RDONLY, 0777)
                 open_files.append(fd)
-        except OSError, e:
+        except OSError as e:
             # We expect to get EMFILE, for opening too many files.
             if e.errno != errno.EMFILE:
                 print "OSError: %s!" % (e.errno,)
@@ -208,7 +208,7 @@ def test_fstat():
             compare(st[7], fs[7], 7)
             compare(st[8], fs[8], 8)
             compare(st[9], fs[9], 9)
-        except OSError, e:
+        except OSError as e:
             print "OSError: %s" % (e.errno,)
         print "All ok!"
         return 0
diff --git a/rpython/translator/sandbox/vfs.py b/rpython/translator/sandbox/vfs.py
index 8cbdda1292..324d0fdde1 100644
--- a/rpython/translator/sandbox/vfs.py
+++ b/rpython/translator/sandbox/vfs.py
@@ -133,5 +133,5 @@ class RealFile(File):
     def open(self):
         try:
             return open(self.path, "rb")
-        except IOError, e:
+        except IOError as e:
             raise OSError(e.errno, "open failed")
diff --git a/rpython/translator/test/snippet.py b/rpython/translator/test/snippet.py
index 92eb46ecb9..83f2ee81cd 100644
--- a/rpython/translator/test/snippet.py
+++ b/rpython/translator/test/snippet.py
@@ -796,7 +796,7 @@ def exception_deduction0(x):
 def exception_deduction():
     try:
         exception_deduction0(2)
-    except Exc, e:
+    except Exc as e:
         return e
     return Exc()
 
@@ -812,7 +812,7 @@ def exception_deduction_with_raise1(x):
         exception_deduction0(2)
         if x:
             raise Exc()
-    except Exc, e:
+    except Exc as e:
         witness(e)
         return e
     return Exc()
@@ -822,7 +822,7 @@ def exception_deduction_with_raise2(x):
         exception_deduction0(2)
        if x:
            raise Exc
-    except Exc, e:
+    except Exc as e:
         witness(e)
         return e
     return Exc()
@@ -832,7 +832,7 @@ def exception_deduction_with_raise3(x):
         exception_deduction0(2)
         if x:
             raise Exc, Exc()
-    except Exc, e:
+    except Exc as e:
         witness(e)
         return e
     return Exc()
@@ -847,7 +847,7 @@ def exception_deduction_we_are_dumb():
     a = 1
     try:
         exception_deduction0(2)
-    except Exc, e:
+    except Exc as e:
         a += 1
         return e
     return Exc()
@@ -858,10 +858,10 @@ class Exc2(Exception):
 def nested_exception_deduction():
     try:
         exception_deduction0(1)
-    except Exc, e:
+    except Exc as e:
         try:
             exception_deduction0(2)
-        except Exc2, f:
+        except Exc2 as f:
             return (e, f)
         return (e, Exc2())
     return (Exc(), Exc2())
@@ -886,7 +886,7 @@ class Mod:
         s = self.s
         try:
             s.o()
-        except Exc3, e:
+        except Exc3 as e:
             return e.m()
         return 0
 
@@ -898,12 +898,12 @@ class Mod3:
         s = self.s
         try:
             s.o()
-        except Exc4, e1:
+        except Exc4 as e1:
             return e1.m()
-        except Exc3, e2:
+        except Exc3 as e2:
             try:
                 return e2.m()
-            except Exc4, e3:
+            except Exc4 as e3:
                 return e3.m()
         return 0
 
diff --git a/rpython/translator/tool/graphpage.py b/rpython/translator/tool/graphpage.py
index 99c64932e5..b64d7468d7 100644
--- a/rpython/translator/tool/graphpage.py
+++ b/rpython/translator/tool/graphpage.py
@@ -405,13 +405,14 @@ def nameof(obj, cache={}):
 def try_show(obj):
     if isinstance(obj, FunctionGraph):
         obj.show()
+        return obj
     elif isinstance(obj, Link):
-        try_show(obj.prevblock)
+        return try_show(obj.prevblock)
     elif isinstance(obj, Block):
         graph = obj._slowly_get_graph()
         if isinstance(graph, FunctionGraph):
             graph.show()
-            return
+            return graph
         graph = IncompleteGraph(graph)
         SingleGraphPage(graph).display()
     else: