path: root/staptree.cxx
author     fche <fche>  2006-01-26 22:55:20 +0000
committer  fche <fche>  2006-01-26 22:55:20 +0000
commit     3066c15c5313ba37afb518ef13d207c764a8ad2a (patch)
tree       1fdbc217129e2f46ca4a977f9685bf2a0c27b726 /staptree.cxx
parent     65bf1df7d18eae25ae865b5179395a339cc9d283 (diff)
2006-01-26 Frank Ch. Eigler <fche@elastic.org>
	PR 2060: lock elevation, mop-up
	* staptree.cxx (functioncall_traversing_visitor): Store a
	current_function pointer during traversal.
	(visit_embeddedcode): Use it to handle $target-synthesized functions.
	(varuse_collecting_visitor::visit_assignment): Correct l-lr typo.
	(visit_foreach_loop): Note added write on sorted foreach.
	(visit_delete_statement): Note as read+write.
	* staptree.h: Corresponding changes.
	* elaborate.cxx (dead_assignment_remover::visit_expr_statement):
	Correct stmt token after possible expression rewriting.
	* tapsets.cxx (visit_target_symbol): Create naming convention to
	recognize $target-synthesized functions.
	* translate.cxx (emit_locks, emit_unlocks): New functions to emit
	lock/unlock sequences at the outermost level of a probe.
	(emit_probe): Call them.
	(varlock_*): #if-0 out the lock code generation.  Later, these
	classes should be removed.
	(translate_pass): Emit read_trylock() kludge macro for old kernels.
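A rough sketch of how the visitor changed below is meant to be driven (illustrative only; the real call sites are in elaborate.cxx and translate.cxx, and the default constructor is assumed here):

    // hedged sketch -- gather variable usage for one probe body
    void note_probe_usage (probe* p)
    {
      varuse_collecting_visitor vut;   // constructor signature assumed
      p->body->visit (& vut);
      if (vut.embedded_seen)
        return;  // conservatively treat as having side effects
      // otherwise vut.written (and its read counterpart) can drive
      // the new emit_locks / emit_unlocks in translate.cxx
    }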
Diffstat (limited to 'staptree.cxx')
-rw-r--r--  staptree.cxx | 48
1 file changed, 46 insertions(+), 2 deletions(-)
diff --git a/staptree.cxx b/staptree.cxx
index 97b4bcc1..b4398312 100644
--- a/staptree.cxx
+++ b/staptree.cxx
@@ -1460,7 +1460,10 @@ functioncall_traversing_visitor::visit_functioncall (functioncall* e)
{
traversed.insert (e->referent);
// recurse
+ functiondecl* last_current_function = current_function;
+ current_function = e->referent;
e->referent->body->visit (this);
+ current_function = last_current_function;
}
}
@@ -1468,6 +1471,19 @@ functioncall_traversing_visitor::visit_functioncall (functioncall* e)
void
varuse_collecting_visitor::visit_embeddedcode (embeddedcode *s)
{
+ // In order to elide unused but correct functions generated to
+ // get/set $target variables, we encode our knowledge that such
+ // functions are side-effect-free. We tell them apart from ordinary
+ // tapset embedded-C functions by the naming prefix. XXX Something
+ // apart from this heuristic would be nice. XXX Similarly, some
+ // tapset embedded-C functions are pure and disposable, like
+ // substr().
+
+ assert (current_function); // only they get embedded code
+ string name = current_function->name;
+ if (name.length() > 6 && name.substr(0, 6) == "_tvar_")
+ return;
+
embedded_seen = true;
}
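For illustration, the heuristic added above boils down to this stand-alone check (the example names are hypothetical; the exact synthesized-name format is defined in tapsets.cxx, not here):

    #include <string>
    // true only for getter/setter functions synthesized for $target access
    static bool is_tvar_synthesized (const std::string& name)
    {
      return name.length() > 6 && name.substr(0, 6) == "_tvar_";
    }
    // is_tvar_synthesized ("_tvar_get_foo_7") -> true   (hypothetical name)
    // is_tvar_synthesized ("substr")          -> false  (ordinary tapset function)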
@@ -1489,7 +1505,7 @@ varuse_collecting_visitor::visit_assignment (assignment *e)
{
if (e->op == "=" || e->op == "<<<") // pure writes
{
- expression* last_lvalue = current_lrvalue;
+ expression* last_lvalue = current_lvalue;
current_lvalue = e->left; // leave a mark for ::visit_symbol
functioncall_traversing_visitor::visit_assignment (e);
current_lvalue = last_lvalue;
@@ -1581,9 +1597,37 @@ varuse_collecting_visitor::visit_post_crement (post_crement *e)
current_lrvalue = last_lrvalue;
}
+void
+varuse_collecting_visitor::visit_foreach_loop (foreach_loop* s)
+{
+ functioncall_traversing_visitor::visit_foreach_loop (s);
+ // If the collection is sorted, imply a "write" access to the
+ // array in addition to the "read" one already noted in the
+ // base class call above.
+ if (s->sort_direction)
+ {
+ symbol *array = NULL;
+ hist_op *hist = NULL;
+ classify_indexable (s->base, array, hist);
+ if (array) this->written.insert (array->referent);
+ // XXX: Can hist_op iterations be sorted?
+ }
+}
-
+void
+varuse_collecting_visitor::visit_delete_statement (delete_statement* s)
+{
+ // Ideally, this would be treated like an assignment: a plain write
+ // to the underlying value ("lvalue"). XXX: However, the
+ // optimization pass is not smart enough to remove an unneeded
+ // "delete" yet, so we pose more like a *crement ("lrvalue"). This
+ // should protect the underlying value from the optimizer's mischief.
+ expression* last_lrvalue = current_lrvalue;
+ current_lrvalue = s->value; // leave a mark for ::visit_symbol
+ functioncall_traversing_visitor::visit_delete_statement (s);
+ current_lrvalue = last_lrvalue;
+}
// ------------------------------------------------------------------------
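As a closing illustration, the read/write notes collected above could feed the new emit_locks in translate.cxx (not part of this file's diff); a minimal sketch, assuming a "read" set exists alongside the "written" set used in the hunks above:

    #include <set>
    #include <string>
    // hedged sketch -- pick a lock kind for one global variable
    static const char* lock_kind (const std::set<std::string>& read,
                                  const std::set<std::string>& written,
                                  const std::string& var)
    {
      if (written.count (var)) return "write_lock";
      if (read.count (var))    return "read_trylock";  // kludge macro per ChangeLog
      return "";                                       // no lock needed
    }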