author    craig <craig@11d20701-8431-0410-a711-e3c959e3b870> 2012-01-01 11:40:09 +0000
committer craig <craig@11d20701-8431-0410-a711-e3c959e3b870> 2012-01-01 11:40:09 +0000
commit    7ed83b6c6666eb8b6b104c211ae7e52907350372 (patch)
tree      4430b556abac0ad660a0aacf1887d77f85d8be02 /scribus/fpoptimizer.cpp
Branch 1.3.5 tree to 1.4.x tree, goodbye 1.3.x
git-svn-id: svn://scribus.net/branches/Version14x/Scribus@17163 11d20701-8431-0410-a711-e3c959e3b870
Diffstat (limited to 'scribus/fpoptimizer.cpp')
-rw-r--r--  scribus/fpoptimizer.cpp  1851
1 files changed, 1851 insertions, 0 deletions
diff --git a/scribus/fpoptimizer.cpp b/scribus/fpoptimizer.cpp
new file mode 100644
index 0000000..30470cb
--- /dev/null
+++ b/scribus/fpoptimizer.cpp
@@ -0,0 +1,1851 @@
+/*
+For general Scribus (>=1.3.2) copyright and licensing information please refer
+to the COPYING file provided with the program. Following this notice may exist
+a copyright and/or license notice that predates the release of Scribus 1.3.2
+for which a new license (GPL+exception) is in place.
+*/
+//===========================================
+// Function parser v2.8 optimizer by Bisqwit
+//===========================================
+
+/*
+ NOTE!
+ ----
+    Everything that goes into the #ifndef SUPPORT_OPTIMIZER part
+    (i.e. when SUPPORT_OPTIMIZER is not defined) should be put at
+    the end of the fparser.cc file, not in this file.
+
+    Everything in this file should be inside the #ifdef SUPPORT_OPTIMIZER
+    block (except the #include "fpconfig.h" line).
+*/
+
+
+#include "fpconfig.h"
+
+#ifdef SUPPORT_OPTIMIZER
+
+#include "fparser.h"
+#include "fptypes.h"
+using namespace FUNCTIONPARSERTYPES;
+
+#include <cmath>
+#include <list>
+#include <utility>
+#include <cassert>
+using namespace std;
+
+#ifndef M_PI
+#define M_PI 3.1415926535897932384626433832795
+#endif
+
+#define CONSTANT_E 2.71828182845904509080 // exp(1)
+#define CONSTANT_PI M_PI // atan2(0,-1)
+#define CONSTANT_L10 2.30258509299404590109 // log(10)
+#define CONSTANT_L10I 0.43429448190325176116 // 1/log(10)
+#define CONSTANT_L10E CONSTANT_L10I // log10(e)
+#define CONSTANT_L10EI CONSTANT_L10 // 1/log10(e)
+#define CONSTANT_DR (180.0 / M_PI) // 180/pi
+#define CONSTANT_RD (M_PI / 180.0) // pi/180
+
+namespace {
+inline double Min(double d1, double d2)
+{
+ return d1<d2 ? d1 : d2;
+}
+inline double Max(double d1, double d2)
+{
+ return d1>d2 ? d1 : d2;
+}
+
+class compres
+{
+ // states: 0=false, 1=true, 2=unknown
+public:
+ compres(bool b) : state(b) {}
+ compres(char v) : state(v) {}
+ // is it?
+ operator bool() const { return state != 0; }
+ // is it not?
+ bool operator! () const { return state != 1; }
+ bool operator==(bool b) const { return state != !b; }
+ bool operator!=(bool b) const { return state != b; }
+private:
+ char state;
+};
+
+const compres maybe = (char)2;
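+// Three-valued logic of compres/maybe, illustrated (hypothetical caller):
+//   compres r = tree.NonZero();  // yields true, false or maybe
+//   if(r)  { ... }               // entered when r is true  or maybe
+//   if(!r) { ... }               // entered when r is false or maybe
+// i.e. "maybe" satisfies both a plain test and its negation.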
+
+struct CodeTree;
+
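+// The sign flag below piggybacks on the parent operator: under cAdd a
+// signed child is a subtracted term, under cMul an inverted (divided)
+// factor.  e.g.  x - y  is stored as  cAdd{ x, -y }  and  x / y  as
+// cMul{ x, inv(y) }.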
+class SubTree
+{
+ CodeTree *tree;
+ bool sign; // Only possible when parent is cAdd or cMul
+
+ inline void flipsign() { sign = !sign; }
+public:
+ SubTree();
+ SubTree(double value);
+ SubTree(const SubTree &b);
+ SubTree(const CodeTree &b);
+
+ ~SubTree();
+ const SubTree &operator= (const SubTree &b);
+ const SubTree &operator= (const CodeTree &b);
+
+ bool getsign() const { return sign; }
+
+ const CodeTree* operator-> () const { return tree; }
+ const CodeTree& operator* () const { return *tree; }
+ struct CodeTree* operator-> () { return tree; }
+ struct CodeTree& operator* () { return *tree; }
+
+ bool operator< (const SubTree& b) const;
+ bool operator== (const SubTree& b) const;
+ void Negate(); // Note: Parent must be cAdd
+ void Invert(); // Note: Parent must be cMul
+
+ void CheckConstNeg();
+ void CheckConstInv();
+};
+
+bool IsNegate(const SubTree &p1, const SubTree &p2);
+bool IsInverse(const SubTree &p1, const SubTree &p2);
+
+typedef list<SubTree> paramlist;
+
+struct CodeTreeData
+{
+ paramlist args;
+
+private:
+ unsigned op; // Operation
+ double value; // In case of cImmed
+ unsigned var; // In case of cVar
+ unsigned funcno; // In case of cFCall, cPCall
+
+public:
+ CodeTreeData() : op(cAdd) {}
+ ~CodeTreeData() {}
+
+ void SetOp(unsigned newop) { op=newop; }
+ void SetFuncNo(unsigned newno) { funcno=newno; }
+ unsigned GetFuncNo() const { return funcno; }
+
+ bool IsFunc() const { return op == cFCall || op == cPCall; }
+ bool IsImmed() const { return op == cImmed; }
+ bool IsVar() const { return op == cVar; }
+ inline unsigned GetOp() const { return op; }
+ inline double GetImmed() const
+ {
+ return value;
+ }
+ inline unsigned GetVar() const
+ {
+ return var;
+ }
+
+ void AddParam(const SubTree &p)
+ {
+ args.push_back(p);
+ }
+ void SetVar(unsigned v)
+ {
+ args.clear();
+ op = cVar;
+ var = v;
+ }
+ void SetImmed(double v)
+ {
+ args.clear();
+ op = cImmed;
+ value = orig = v;
+ inverted = negated = false;
+ }
+ void NegateImmed()
+ {
+ negated = !negated;
+ UpdateValue();
+ }
+ void InvertImmed()
+ {
+ inverted = !inverted;
+ UpdateValue();
+ }
+
+ bool IsOriginal() const { return !(IsInverted() || IsNegated()); }
+ bool IsInverted() const { return inverted; }
+ bool IsNegated() const { return negated; }
+ bool IsInvertedOriginal() const { return IsInverted() && !IsNegated(); }
+ bool IsNegatedOriginal() const { return !IsInverted() && IsNegated(); }
+
+private:
+ void UpdateValue()
+ {
+ value = orig;
+ if(IsInverted()) { value = 1.0 / value;
+ // FIXME: potential divide by zero.
+ }
+ if(IsNegated()) value = -value;
+ }
+
+ double orig;
+ bool inverted;
+ bool negated;
+protected:
+ // Ensure we don't accidentally copy this
+ void operator=(const CodeTreeData &b);
+};
+
+
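+// Reference counting with copy-on-write, illustrated (hypothetical):
+//   CodeTreeDataPtr a;     // fresh node, refcount 1
+//   CodeTreeDataPtr b = a; // shared data, refcount 2
+//   b->SetOp(cMul);        // non-const -> triggers PrepareForWrite(),
+//                          // which clones first, so 'a' stays intact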
+class CodeTreeDataPtr
+{
+ typedef pair<CodeTreeData, unsigned> p_t;
+ typedef p_t* pp;
+ mutable pp p;
+
+ void Alloc() const { ++p->second; }
+    void Dealloc() const { if(!--p->second) { delete p; } p = 0; }
+
+ void PrepareForWrite()
+ {
+ // We're ready if we're the only owner.
+ if(p->second == 1) return;
+
+ // Then make a clone.
+ p_t *newtree = new p_t(p->first, 1);
+ // Forget the old
+ Dealloc();
+ // Keep the new
+ p = newtree;
+ }
+
+public:
+ CodeTreeDataPtr() : p(new p_t) { p->second = 1; }
+ CodeTreeDataPtr(const CodeTreeDataPtr &b): p(b.p) { Alloc(); }
+ ~CodeTreeDataPtr() { Dealloc(); }
+ const CodeTreeDataPtr &operator= (const CodeTreeDataPtr &b)
+ {
+ b.Alloc();
+ Dealloc();
+ p = b.p;
+ return *this;
+ }
+ const CodeTreeData *operator-> () const { return &p->first; }
+ const CodeTreeData &operator* () const { return p->first; }
+ CodeTreeData *operator-> () { PrepareForWrite(); return &p->first; }
+ CodeTreeData &operator* () { PrepareForWrite(); return p->first; }
+
+ void Shock();
+};
+
+
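+// Dispatches the sign normalization appropriate for the parent operator:
+// CheckConstInv() under cMul, CheckConstNeg() otherwise (cAdd).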
+#define CHECKCONSTNEG(item, op) \
+ ((op)==cMul) \
+ ? (item).CheckConstInv() \
+ : (item).CheckConstNeg()
+
+struct CodeTree
+{
+ CodeTreeDataPtr data;
+
+private:
+ typedef paramlist::iterator pit;
+ typedef paramlist::const_iterator pcit;
+
+ /*
+ template<unsigned v> inline void chk() const
+ {
+ }
+ */
+
+public:
+ const pcit GetBegin() const { return data->args.begin(); }
+ const pcit GetEnd() const { return data->args.end(); }
+ const pit GetBegin() { return data->args.begin(); }
+ const pit GetEnd() { return data->args.end(); }
+ const SubTree& getp0() const { /*chk<1>();*/pcit tmp=GetBegin(); return *tmp; }
+ const SubTree& getp1() const { /*chk<2>();*/pcit tmp=GetBegin(); ++tmp; return *tmp; }
+ const SubTree& getp2() const { /*chk<3>();*/pcit tmp=GetBegin(); ++tmp; ++tmp; return *tmp; }
+ unsigned GetArgCount() const { return data->args.size(); }
+ void Erase(const pit p) { data->args.erase(p); }
+
+ SubTree& getp0() { /*chk<1>();*/pit tmp=GetBegin(); return *tmp; }
+ SubTree& getp1() { /*chk<2>();*/pit tmp=GetBegin(); ++tmp; return *tmp; }
+ SubTree& getp2() { /*chk<3>();*/pit tmp=GetBegin(); ++tmp; ++tmp; return *tmp; }
+
+ // set
+ void SetImmed(double v) { data->SetImmed(v); }
+ void SetOp(unsigned op) { data->SetOp(op); }
+ void SetVar(unsigned v) { data->SetVar(v); }
+ // get
+ double GetImmed() const { return data->GetImmed(); }
+ unsigned GetVar() const { return data->GetVar(); }
+ unsigned GetOp() const { return data->GetOp(); }
+ // test
+ bool IsImmed() const { return data->IsImmed(); }
+ bool IsVar() const { return data->IsVar(); }
+ // act
+ void AddParam(const SubTree &p) { data->AddParam(p); }
+ void NegateImmed() { data->NegateImmed(); } // don't use when op!=cImmed
+ void InvertImmed() { data->InvertImmed(); } // don't use when op!=cImmed
+
+ compres NonZero() const { if(!IsImmed()) return maybe;
+ return GetImmed() != 0.0; }
+ compres IsPositive() const { if(!IsImmed()) return maybe;
+ return GetImmed() > 0.0; }
+
+private:
+ struct ConstList
+ {
+        double voidvalue;   // identity element: 0 for cAdd, 1 for cMul
+        list<pit> cp;       // positions of the collected constant params
+        double value;       // their combined (added or multiplied) value
+ unsigned size() const { return cp.size(); }
+ };
+ struct ConstList BuildConstList();
+ void KillConst(const ConstList &cl)
+ {
+ for(list<pit>::const_iterator i=cl.cp.begin(); i!=cl.cp.end(); ++i)
+ Erase(*i);
+ }
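+    // e.g. for cAdd{ 2, 3, x }, BuildConstList() folds 2+3 = 5;
+    // FinishConst() below then erases the two immeds and adds the
+    // single immed 5 back, yielding cAdd{ x, 5 }.  A folded value
+    // equal to the identity element (voidvalue) is dropped entirely.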
+ void FinishConst(const ConstList &cl)
+ {
+ if(cl.value != cl.voidvalue && cl.size() > 1) AddParam(cl.value);
+ if(cl.value == cl.voidvalue || cl.size() > 1) KillConst(cl);
+ }
+
+public:
+ CodeTree() {}
+ CodeTree(double v) { SetImmed(v); }
+
+ CodeTree(unsigned op, const SubTree &p)
+ {
+ SetOp(op);
+ AddParam(p);
+ }
+ CodeTree(unsigned op, const SubTree &p1, const SubTree &p2)
+ {
+ SetOp(op);
+ AddParam(p1);
+ AddParam(p2);
+ }
+
+ bool operator== (const CodeTree& b) const;
+ bool operator< (const CodeTree& b) const;
+
+private:
+ bool IsSortable() const
+ {
+ switch(GetOp())
+ {
+ case cAdd: case cMul:
+ case cEqual:
+ case cAnd: case cOr:
+ case cMax: case cMin:
+ return true;
+ default:
+ return false;
+ }
+ }
+ void SortIfPossible()
+ {
+ if(IsSortable())
+ {
+ data->args.sort();
+ }
+ }
+
+ void ReplaceWithConst(double value)
+ {
+ SetImmed(value);
+
+ /* REMEMBER TO CALL CheckConstInv / CheckConstNeg
+ * FOR PARENT SubTree, OR MAYHEM HAPPENS
+ */
+ }
+
+ void ReplaceWith(const CodeTree &b)
+ {
+ // If b is child of *this, mayhem
+ // happens. So we first make a clone
+ // and then proceed with copy.
+ CodeTreeDataPtr tmp = b.data;
+ tmp.Shock();
+ data = tmp;
+ }
+
+ void ReplaceWith(unsigned op, const SubTree &p)
+ {
+ ReplaceWith(CodeTree(op, p));
+ }
+
+ void ReplaceWith(unsigned op, const SubTree &p1, const SubTree &p2)
+ {
+ ReplaceWith(CodeTree(op, p1, p2));
+ }
+
+ void OptimizeConflict()
+ {
+ // This optimization does this: x-x = 0, x/x = 1, a+b-a = b.
+
+ if(GetOp() == cAdd || GetOp() == cMul)
+ {
+ Redo:
+ pit a, b;
+ for(a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ for(b=GetBegin(); ++b != GetEnd(); )
+ {
+ const SubTree &p1 = *a;
+ const SubTree &p2 = *b;
+
+ if(GetOp() == cMul ? IsInverse(p1,p2)
+ : IsNegate(p1,p2))
+ {
+                        // These parameters cancel each other out
+ Erase(b);
+ Erase(a);
+ goto Redo;
+ }
+ }
+ }
+ }
+ OptimizeRedundant();
+ }
+
+ void OptimizeRedundant()
+ {
+ // This optimization does this: min()=0, max()=0, add()=0, mul()=1
+
+ if(!GetArgCount())
+ {
+ if(GetOp() == cAdd || GetOp() == cMin || GetOp() == cMax)
+ ReplaceWithConst(0);
+ else if(GetOp() == cMul)
+ ReplaceWithConst(1);
+ return;
+ }
+
+ // And this: mul(x) = x, min(x) = x, max(x) = x, add(x) = x
+
+ if(GetArgCount() == 1)
+ {
+ if(GetOp() == cMul || GetOp() == cAdd || GetOp() == cMin || GetOp() == cMax)
+ if(!getp0().getsign())
+ ReplaceWith(*getp0());
+ }
+
+ OptimizeDoubleNegations();
+ }
+
+ void OptimizeDoubleNegations()
+ {
+ if(GetOp() == cAdd)
+ {
+ // Eschew double negations
+
+ // If any of the elements is cMul
+ // and has a numeric constant, negate
+ // the constant and negate sign.
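+            // e.g. in  x - (3*y)  the negated term  -(3*y)
+            // becomes  +((-3)*y)  and loses its sign.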
+
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ SubTree &pa = *a;
+ if(pa.getsign()
+ && pa->GetOp() == cMul)
+ {
+ CodeTree &p = *pa;
+ for(pit b=p.GetBegin();
+ b!=p.GetEnd(); ++b)
+ {
+ SubTree &pb = *b;
+ if(pb->IsImmed())
+ {
+ pb.Negate();
+ pa.Negate();
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ if(GetOp() == cMul)
+ {
+ // If any of the elements is cPow
+ // and has a numeric exponent, negate
+ // the exponent and negate sign.
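+            // e.g. in  x / y^2  (stored as  x * inv(y^2))  the
+            // inverted factor becomes  y^(-2)  and loses its sign.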
+
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ SubTree &pa = *a;
+ if(pa.getsign() && pa->GetOp() == cPow)
+ {
+ CodeTree &p = *pa;
+ if(p.getp1()->IsImmed())
+ {
+ // negate ok for pow when op=cImmed
+ p.getp1().Negate();
+ pa.Negate();
+ }
+ }
+ }
+ }
+ }
+
+ void OptimizeConstantMath1()
+ {
+ // This optimization does three things:
+ // - For adding groups:
+ // Constants are added together.
+ // - For multiplying groups:
+ // Constants are multiplied together.
+ // - For function calls:
+ // If all parameters are constants,
+ // the call is replaced with constant value.
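+        // e.g.  2 + x + 3  ->  x + 5
+        //       2 * x * 3  ->  x * 6
+        //       abs(-4.5)  ->  4.5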
+
+ // First, do this:
+ OptimizeAddMulFlat();
+
+ switch(GetOp())
+ {
+ case cAdd:
+ {
+ ConstList cl = BuildConstList();
+ FinishConst(cl);
+ break;
+ }
+ case cMul:
+ {
+ ConstList cl = BuildConstList();
+
+ if(cl.value == 0.0) ReplaceWithConst(0.0);
+ else FinishConst(cl);
+
+ break;
+ }
+ #define ConstantUnaryFun(token, fun) \
+ case token: { const SubTree &p0 = getp0(); \
+ if(p0->IsImmed()) ReplaceWithConst(fun(p0->GetImmed())); \
+ break; }
+ #define ConstantBinaryFun(token, fun) \
+ case token: { const SubTree &p0 = getp0(); \
+ const SubTree &p1 = getp1(); \
+ if(p0->IsImmed() && \
+ p1->IsImmed()) ReplaceWithConst(fun(p0->GetImmed(), p1->GetImmed())); \
+ break; }
+
+ // FIXME: potential invalid parameters for functions
+ // can cause exceptions here
+
+ ConstantUnaryFun(cAbs, fabs);
+ ConstantUnaryFun(cAcos, acos);
+ ConstantUnaryFun(cAsin, asin);
+ ConstantUnaryFun(cAtan, atan);
+ ConstantUnaryFun(cCeil, ceil);
+ ConstantUnaryFun(cCos, cos);
+ ConstantUnaryFun(cCosh, cosh);
+ ConstantUnaryFun(cFloor, floor);
+ ConstantUnaryFun(cLog, log);
+ ConstantUnaryFun(cSin, sin);
+ ConstantUnaryFun(cSinh, sinh);
+ ConstantUnaryFun(cTan, tan);
+ ConstantUnaryFun(cTanh, tanh);
+ ConstantBinaryFun(cAtan2, atan2);
+ ConstantBinaryFun(cMax, Max);
+ ConstantBinaryFun(cMin, Min);
+ ConstantBinaryFun(cMod, fmod); // not a func, but belongs here too
+ ConstantBinaryFun(cPow, pow);
+
+ case cNeg:
+ case cSub:
+ case cDiv:
+ /* Unreached (nonexistent operator)
+ * TODO: internal error here?
+ */
+ break;
+
+ case cCot:
+ case cCsc:
+ case cSec:
+ case cDeg:
+ case cRad:
+ case cLog10:
+ case cSqrt:
+ case cExp:
+ /* Unreached (nonexistent function)
+ * TODO: internal error here?
+ */
+ break;
+ }
+
+ OptimizeConflict();
+ }
+
+ void OptimizeAddMulFlat()
+ {
+        // This optimization flattens the nesting of the tree.
+ // Examples:
+ // x + (y+z) = x+y+z
+ // x * (y/z) = x*y/z
+ // x / (y/z) = x/y*z
+
+ if(GetOp() == cAdd || GetOp() == cMul)
+ {
+ // If children are same type as parent add them here
+ for(pit b, a=GetBegin(); a!=GetEnd(); a=b)
+ {
+ const SubTree &pa = *a; b=a; ++b;
+ if(pa->GetOp() != GetOp()) continue;
+
+ // Child is same type
+ for(pcit c=pa->GetBegin();
+ c!=pa->GetEnd();
+ ++c)
+ {
+ const SubTree &pb = *c;
+ if(pa.getsign())
+ {
+ // +a -(+b +c)
+ // means b and c will be negated
+
+ SubTree tmp = pb;
+ if(GetOp() == cMul)
+ tmp.Invert();
+ else
+ tmp.Negate();
+ AddParam(tmp);
+ }
+ else
+ AddParam(pb);
+ }
+ Erase(a);
+
+ // Note: OptimizeConstantMath1() would be a good thing to call next.
+ }
+ }
+ }
+
+ void OptimizeLinearCombine()
+ {
+ // This optimization does the following:
+ //
+ // x*x*x*x -> x^4
+ // x+x+x+x -> x*4
+ // x*x -> x^2
+        // x/z/z -> x * z^-2
+ //
+
+ // Remove conflicts first, so we don't have to worry about signs.
+ OptimizeConflict();
+
+ bool didchanges = false;
+ if(GetOp() == cAdd || GetOp() == cMul)
+ {
+ Redo:
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+
+ list<pit> poslist;
+
+ for(pit b=a; ++b!=GetEnd(); )
+ {
+ const SubTree &pb = *b;
+ if(*pa == *pb)
+ poslist.push_back(b);
+ }
+
+ unsigned min = 2;
+ if(poslist.size() >= min)
+ {
+ SubTree arvo = pa;
+ bool negate = arvo.getsign();
+
+ double factor = poslist.size() + 1;
+
+ if(negate)
+ {
+ arvo.Negate();
+ factor = -factor;
+ }
+
+ CodeTree tmp(GetOp()==cAdd ? cMul : cPow,
+ arvo,
+ factor);
+
+ list<pit>::const_iterator j;
+ for(j=poslist.begin(); j!=poslist.end(); ++j)
+ Erase(*j);
+ poslist.clear();
+
+ *a = tmp;
+ didchanges = true;
+ goto Redo;
+ }
+ }
+ }
+ if(didchanges)
+ {
+ // As a result, there might be need for this:
+ OptimizeAddMulFlat();
+ // And this:
+ OptimizeRedundant();
+ }
+ }
+
+ void OptimizeLogarithm()
+ {
+ /*
+ This is basic logarithm math:
+          pow(Y, log(X)/log(Y)) = X
+ log(X)/log(Y) = logY(X)
+ log(X^Y) = log(X)*Y
+ log(X*Y) = log(X)+log(Y)
+ exp(log(X)*Y) = X^Y
+
+ This function does these optimizations:
+ pow(const_E, log(x)) = x
+ pow(const_E, log(x)*y) = x^y
+ pow(10, log(x)*const_L10I*y) = x^y
+ pow(z, log(x)/log(z)*y) = x^y
+
+ And this:
+ log(x^z) = z * log(x)
+ Which automatically causes these too:
+ log(pow(const_E, x)) = x
+ log(pow(y, x)) = x * log(y)
+ log(pow(pow(const_E, y), x)) = x*y
+
+ And it does this too:
+ log(x) + log(y) + log(z) = log(x * y * z)
+ log(x * exp(y)) = log(x) + y
+
+ */
+
+ // Must be already in exponential form.
+
+ // Optimize exponents before doing something.
+ OptimizeExponents();
+
+ if(GetOp() == cLog)
+ {
+ // We should have one parameter for log() function.
+ // If we don't, we're screwed.
+
+ const SubTree &p = getp0();
+
+ if(p->GetOp() == cPow)
+ {
+ // Found log(x^y)
+ SubTree p0 = p->getp0(); // x
+ SubTree p1 = p->getp1(); // y
+
+ // Build the new logarithm.
+ CodeTree tmp(GetOp(), p0); // log(x)
+
+ // Become log(x) * y
+ ReplaceWith(cMul, tmp, p1);
+ }
+ else if(p->GetOp() == cMul)
+ {
+ // Redefine &p nonconst
+ SubTree &p = getp0();
+
+ p->OptimizeAddMulFlat();
+ p->OptimizeExponents();
+ CHECKCONSTNEG(p, p->GetOp());
+
+ list<SubTree> adds;
+
+ for(pit b, a = p->GetBegin();
+ a != p->GetEnd(); a=b)
+ {
+ SubTree &pa = *a; b=a; ++b;
+ if(pa->GetOp() == cPow
+ && pa->getp0()->IsImmed()
+ && pa->getp0()->GetImmed() == CONSTANT_E)
+ {
+ adds.push_back(pa->getp1());
+ p->Erase(a);
+ continue;
+ }
+ }
+ if(adds.size())
+ {
+ CodeTree tmp(cAdd, *this);
+
+ list<SubTree>::const_iterator i;
+ for(i=adds.begin(); i!=adds.end(); ++i)
+ tmp.AddParam(*i);
+
+ ReplaceWith(tmp);
+ }
+ }
+ }
+ if(GetOp() == cAdd)
+ {
+ // Check which ones are logs.
+ list<pit> poslist;
+
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+ if(pa->GetOp() == cLog)
+ poslist.push_back(a);
+ }
+
+ if(poslist.size() >= 2)
+ {
+ CodeTree tmp(cMul, 1.0); // eek
+
+ list<pit>::const_iterator j;
+ for(j=poslist.begin(); j!=poslist.end(); ++j)
+ {
+ const SubTree &pb = **j;
+ // Take all of its children
+ for(pcit b=pb->GetBegin();
+ b!=pb->GetEnd();
+ ++b)
+ {
+ SubTree tmp2 = *b;
+ if(pb.getsign()) tmp2.Negate();
+ tmp.AddParam(tmp2);
+ }
+ Erase(*j);
+ }
+ poslist.clear();
+
+ AddParam(CodeTree(cLog, tmp));
+ }
+ // Done, hopefully
+ }
+ if(GetOp() == cPow)
+ {
+ const SubTree &p0 = getp0();
+ SubTree &p1 = getp1();
+
+ if(p0->IsImmed() && p0->GetImmed() == CONSTANT_E
+ && p1->GetOp() == cLog)
+ {
+ // pow(const_E, log(x)) = x
+ ReplaceWith(*(p1->getp0()));
+ }
+ else if(p1->GetOp() == cMul)
+ {
+ //bool didsomething = true;
+
+ pit poslogpos; bool foundposlog = false;
+ pit neglogpos; bool foundneglog = false;
+
+ ConstList cl = p1->BuildConstList();
+
+ for(pit a=p1->GetBegin(); a!=p1->GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+ if(pa->GetOp() == cLog)
+ {
+ if(!pa.getsign())
+ {
+ foundposlog = true;
+ poslogpos = a;
+ }
+ else if(*p0 == *(pa->getp0()))
+ {
+ foundneglog = true;
+ neglogpos = a;
+ }
+ }
+ }
+
+ if(p0->IsImmed()
+ && p0->GetImmed() == 10.0
+ && cl.value == CONSTANT_L10I
+ && foundposlog)
+ {
+ SubTree base = (*poslogpos)->getp0();
+ p1->KillConst(cl);
+ p1->Erase(poslogpos);
+ p1->OptimizeRedundant();
+ SubTree mul = p1;
+
+ ReplaceWith(cPow, base, mul);
+
+ // FIXME: what optimizations should be done now?
+ return;
+ }
+
+ // Put back the constant
+ FinishConst(cl);
+
+ if(p0->IsImmed()
+ && p0->GetImmed() == CONSTANT_E
+ && foundposlog)
+ {
+ SubTree base = (*poslogpos)->getp0();
+ p1->Erase(poslogpos);
+
+ p1->OptimizeRedundant();
+ SubTree mul = p1;
+
+ ReplaceWith(cPow, base, mul);
+
+ // FIXME: what optimizations should be done now?
+ return;
+ }
+
+ if(foundposlog
+ && foundneglog
+ && *((*neglogpos)->getp0()) == *p0)
+ {
+ SubTree base = (*poslogpos)->getp0();
+ p1->Erase(poslogpos);
+ p1->Erase(neglogpos);
+
+ p1->OptimizeRedundant();
+ SubTree mul = p1;
+
+ ReplaceWith(cPow, base, mul);
+
+ // FIXME: what optimizations should be done now?
+ return;
+ }
+ }
+ }
+ }
+
+ void OptimizeFunctionCalls()
+ {
+ /* Goals: sin(asin(x)) = x
+ * cos(acos(x)) = x
+ * tan(atan(x)) = x
+ * NOTE:
+ * Do NOT do these:
+ * asin(sin(x))
+ * acos(cos(x))
+ * atan(tan(x))
+ * Because someone might want to wrap the angle.
+ */
+ // FIXME: TODO
+ }
+
+ void OptimizePowMulAdd()
+ {
+ // x^3 * x -> x^4
+ // x*3 + x -> x*4
+ // FIXME: Do those
+
+ // x^1 -> x
+ if(GetOp() == cPow)
+ {
+ const SubTree &base = getp0();
+ const SubTree &exponent = getp1();
+
+ if(exponent->IsImmed())
+ {
+ if(exponent->GetImmed() == 1.0)
+ ReplaceWith(*base);
+ else if(exponent->GetImmed() == 0.0
+ && base->NonZero())
+ ReplaceWithConst(1.0);
+ }
+ }
+ }
+
+ void OptimizeExponents()
+ {
+ /* Goals:
+ * (x^y)^z -> x^(y*z)
+ * x^y * x^z -> x^(y+z)
+ */
+ // First move to exponential form.
+ OptimizeLinearCombine();
+
+ bool didchanges = false;
+
+ Redo:
+ if(GetOp() == cPow)
+ {
+ // (x^y)^z -> x^(y*z)
+
+ const SubTree &p0 = getp0();
+ const SubTree &p1 = getp1();
+ if(p0->GetOp() == cPow)
+ {
+ CodeTree tmp(cMul, p0->getp1(), p1);
+ tmp.Optimize();
+
+ ReplaceWith(cPow, p0->getp0(), tmp);
+
+ didchanges = true;
+ goto Redo;
+ }
+ }
+ if(GetOp() == cMul)
+ {
+ // x^y * x^z -> x^(y+z)
+
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+
+ if(pa->GetOp() != cPow) continue;
+
+ list<pit> poslist;
+
+ for(pit b=a; ++b != GetEnd(); )
+ {
+ const SubTree &pb = *b;
+ if(pb->GetOp() == cPow
+ && *(pa->getp0())
+ == *(pb->getp0()))
+ {
+ poslist.push_back(b);
+ }
+ }
+
+ if(poslist.size() >= 1)
+ {
+ poslist.push_back(a);
+
+ CodeTree base = *(pa->getp0());
+
+ CodeTree exponent(cAdd, 0.0); //eek
+
+ // Collect all exponents to cAdd
+ list<pit>::const_iterator i;
+ for(i=poslist.begin(); i!=poslist.end(); ++i)
+ {
+ const SubTree &pb = **i;
+
+ SubTree tmp2 = pb->getp1();
+ if(pb.getsign()) tmp2.Invert();
+
+ exponent.AddParam(tmp2);
+ }
+
+ exponent.Optimize();
+
+ CodeTree result(cPow, base, exponent);
+
+ for(i=poslist.begin(); i!=poslist.end(); ++i)
+ Erase(*i);
+ poslist.clear();
+
+ AddParam(result); // We're cMul, remember
+
+ didchanges = true;
+ goto Redo;
+ }
+ }
+ }
+
+ OptimizePowMulAdd();
+
+ if(didchanges)
+ {
+ // As a result, there might be need for this:
+ OptimizeConflict();
+ }
+ }
+
+ void OptimizeLinearExplode()
+ {
+ // x^2 -> x*x
+ // But only if x is just a simple thing
+
+ // Won't work on anything else.
+ if(GetOp() != cPow) return;
+
+ // TODO TODO TODO
+ }
+
+ void OptimizePascal()
+ {
+#if 0 // Too big, too specific, etc
+
+ // Won't work on anything else.
+ if(GetOp() != cAdd) return;
+
+ // Must be done after OptimizeLinearCombine();
+
+ // Don't need pascal triangle
+ // Coefficient for x^a * y^b * z^c = 3! / (a! * b! * c!)
+
+ // We are greedy and want other than just binomials
+ // FIXME
+
+ // note: partial ones are also nice
+ // x*x + x*y + y*y
+ // = (x+y)^2 - x*y
+ //
+ // x x * x y * + y y * +
+ // -> x y + dup * x y * -
+#endif
+ }
+
+public:
+
+ void Optimize();
+
+ void Assemble(vector<unsigned> &byteCode,
+ vector<double> &immed) const;
+
+ void FinalOptimize()
+ {
+ // First optimize each parameter.
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ (*a)->FinalOptimize();
+
+ /* These things are to be done:
+ *
+ * x * CONSTANT_DR -> cDeg(x)
+ * x * CONSTANT_RD -> cRad(x)
+ * pow(x, 0.5) -> sqrt(x)
+ * log(x) * CONSTANT_L10I -> log10(x)
+ * pow(CONSTANT_E, x) -> exp(x)
+ * inv(sin(x)) -> csc(x)
+ * inv(cos(x)) -> sec(x)
+ * inv(tan(x)) -> cot(x)
+ */
+
+
+ if(GetOp() == cPow)
+ {
+ const SubTree &p0 = getp0();
+ const SubTree &p1 = getp1();
+ if(p0->GetOp() == cImmed
+ && p0->GetImmed() == CONSTANT_E)
+ {
+ ReplaceWith(cExp, p1);
+ }
+ else if(p1->GetOp() == cImmed
+ && p1->GetImmed() == 0.5)
+ {
+ ReplaceWith(cSqrt, p0);
+ }
+ }
+ if(GetOp() == cMul)
+ {
+ if(GetArgCount() == 1 && getp0().getsign())
+ {
+ /***/if(getp0()->GetOp() == cSin)ReplaceWith(cCsc, getp0()->getp0());
+ else if(getp0()->GetOp() == cCos)ReplaceWith(cSec, getp0()->getp0());
+ else if(getp0()->GetOp() == cTan)ReplaceWith(cCot, getp0()->getp0());
+ }
+ }
+ // Separate "if", because op may have just changed
+ if(GetOp() == cMul)
+ {
+ CodeTree *found_log = 0;
+
+ ConstList cl = BuildConstList();
+
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ SubTree &pa = *a;
+ if(pa->GetOp() == cLog && !pa.getsign())
+ found_log = &*pa;
+ }
+ if(cl.value == CONSTANT_L10I && found_log)
+ {
+ // Change the log() to log10()
+ found_log->SetOp(cLog10);
+ // And forget the constant
+ KillConst(cl);
+ }
+        else if(cl.value == CONSTANT_DR)
+        {
+            // Forget the constant first, so cDeg wraps only the
+            // remaining factor(s): x * CONSTANT_DR -> cDeg(x)
+            KillConst(cl);
+            OptimizeRedundant();
+            ReplaceWith(cDeg, *this);
+        }
+        else if(cl.value == CONSTANT_RD)
+        {
+            KillConst(cl);
+            OptimizeRedundant();
+            ReplaceWith(cRad, *this);
+        }
+ else FinishConst(cl);
+ }
+
+ SortIfPossible();
+ }
+};
+
+void CodeTreeDataPtr::Shock()
+{
+ /*
+ PrepareForWrite();
+ paramlist &p2 = (*this)->args;
+ for(paramlist::iterator i=p2.begin(); i!=p2.end(); ++i)
+ {
+ (*i)->data.Shock();
+ }
+ */
+}
+
+CodeTree::ConstList CodeTree::BuildConstList()
+{
+ ConstList result;
+ result.value =
+ result.voidvalue = GetOp()==cMul ? 1.0 : 0.0;
+
+ list<pit> &cp = result.cp;
+ for(pit b, a=GetBegin(); a!=GetEnd(); a=b)
+ {
+ SubTree &pa = *a; b=a; ++b;
+ if(!pa->IsImmed()) continue;
+
+ double thisvalue = pa->GetImmed();
+ if(thisvalue == result.voidvalue)
+ {
+ // This value is no good, forget it
+ Erase(a);
+ continue;
+ }
+ if(GetOp() == cMul)
+ result.value *= thisvalue;
+ else
+ result.value += thisvalue;
+ cp.push_back(a);
+ }
+ if(GetOp() == cMul)
+ {
+ /*
+            If one of the values is -1 and it is not the only value,
+            then some other of the values is negated instead.
+ */
+ for(bool done=false; cp.size() > 1 && !done; )
+ {
+ done = true;
+ for(list<pit>::iterator b,a=cp.begin(); a!=cp.end(); a=b)
+ {
+ b=a; ++b;
+ if((**a)->GetImmed() == -1.0)
+ {
+ Erase(*a);
+ cp.erase(a);
+
+ // take randomly something
+ (**cp.begin())->data->NegateImmed();
+ if(cp.size() < 2)break;
+ done = false;
+ }
+ }
+ }
+ }
+ return result;
+}
+
+void CodeTree::Assemble
+ (vector<unsigned> &byteCode,
+ vector<double> &immed) const
+{
+ #define AddCmd(op) byteCode.push_back((op))
+ #define AddConst(v) do { \
+ byteCode.push_back(cImmed); \
+ immed.push_back((v)); \
+ } while(0)
+
+ if(IsVar())
+ {
+ AddCmd(GetVar());
+ return;
+ }
+ if(IsImmed())
+ {
+ AddConst(GetImmed());
+ return;
+ }
+
+ switch(GetOp())
+ {
+ case cAdd:
+ case cMul:
+ {
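+            // A flattened cAdd/cMul with signed children assembles as
+            // a left-to-right fold: the first child is pushed plain,
+            // each later child is followed by cAdd/cSub (or cMul/cDiv
+            // when the child carries an invert sign).  e.g. the tree
+            //   cAdd{ x, -y, z }   assembles to:   x y cSub z cAdd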
+ unsigned opcount = 0;
+ for(pcit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+
+ if(opcount < 2) ++opcount;
+
+ bool pnega = pa.getsign();
+
+ bool done = false;
+ if(pa->IsImmed())
+ {
+ if(GetOp() == cMul
+ && pa->data->IsInverted()
+ && (pnega || opcount==2)
+ )
+ {
+ CodeTree tmp = *pa;
+ tmp.data->InvertImmed();
+ tmp.Assemble(byteCode, immed);
+ pnega = !pnega;
+ done = true;
+ }
+ else if(GetOp() == cAdd
+ && (pa->data->IsNegatedOriginal()
+ // || pa->GetImmed() < 0
+ )
+ && (pnega || opcount==2)
+ )
+ {
+ CodeTree tmp = *pa;
+ tmp.data->NegateImmed();
+ tmp.Assemble(byteCode, immed);
+ pnega = !pnega;
+ done = true;
+ }
+ }
+ if(!done)
+ pa->Assemble(byteCode, immed);
+
+ if(opcount == 2)
+ {
+ unsigned tmpop = GetOp();
+ if(pnega) // negate
+ {
+ tmpop = (tmpop == cMul) ? cDiv : cSub;
+ }
+ AddCmd(tmpop);
+ }
+ else if(pnega)
+ {
+ if(GetOp() == cMul) AddCmd(cInv);
+ else AddCmd(cNeg);
+ }
+ }
+ break;
+ }
+ case cIf:
+ {
+ // If the parameter amount is != 3, we're screwed.
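+            // Emitted layout:
+            //   <cond> cIf <codeofs> <immedofs> <then-branch>
+            //   cJump <codeofs> <immedofs> <else-branch>
+            // The two slots after cIf and cJump are back-patched
+            // below once the branch sizes are known.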
+ getp0()->Assemble(byteCode, immed);
+
+ unsigned ofs = byteCode.size();
+ AddCmd(cIf);
+ AddCmd(0); // code index
+ AddCmd(0); // immed index
+
+ getp1()->Assemble(byteCode, immed);
+
+ byteCode[ofs+1] = byteCode.size()+2;
+ byteCode[ofs+2] = immed.size();
+
+ ofs = byteCode.size();
+ AddCmd(cJump);
+ AddCmd(0); // code index
+ AddCmd(0); // immed index
+
+ getp2()->Assemble(byteCode, immed);
+
+ byteCode[ofs+1] = byteCode.size()-1;
+ byteCode[ofs+2] = immed.size();
+
+ break;
+ }
+ case cFCall:
+ {
+ // If the parameter count is invalid, we're screwed.
+ for(pcit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+ pa->Assemble(byteCode, immed);
+ }
+ AddCmd(GetOp());
+ AddCmd(data->GetFuncNo());
+ break;
+ }
+ case cPCall:
+ {
+ // If the parameter count is invalid, we're screwed.
+ for(pcit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+ pa->Assemble(byteCode, immed);
+ }
+ AddCmd(GetOp());
+ AddCmd(data->GetFuncNo());
+ break;
+ }
+ default:
+ {
+ // If the parameter count is invalid, we're screwed.
+ for(pcit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ const SubTree &pa = *a;
+ pa->Assemble(byteCode, immed);
+ }
+ AddCmd(GetOp());
+ break;
+ }
+ }
+}
+
+void CodeTree::Optimize()
+{
+    // Phases:
+    // Phase 0: Do local optimizations.
+    // Phase 1: Optimize each parameter.
+    // Phase 2: Do local optimizations again.
+
+ for(unsigned phase=0; phase<=2; ++phase)
+ {
+ if(phase == 1)
+ {
+ // Optimize each parameter.
+ for(pit a=GetBegin(); a!=GetEnd(); ++a)
+ {
+ (*a)->Optimize();
+ CHECKCONSTNEG(*a, GetOp());
+ }
+ continue;
+ }
+ if(phase == 0 || phase == 2)
+ {
+ // Do local optimizations.
+
+ OptimizeConstantMath1();
+ OptimizeLogarithm();
+ OptimizeFunctionCalls();
+ OptimizeExponents();
+ OptimizeLinearExplode();
+ OptimizePascal();
+
+ /* Optimization paths:
+
+ doublenegations=
+ redundant= * doublenegations
+ conflict= * redundant
+ addmulflat=
+ constantmath1= addmulflat * conflict
+ linearcombine= conflict * addmulflat¹ redundant¹
+ powmuladd=
+ exponents= linearcombine * powmuladd conflict¹
+ logarithm= exponents *
+ functioncalls= IDLE
+ linearexplode= IDLE
+ pascal= IDLE
+
+ * = actions here
+ ¹ = only if made changes
+ */
+ }
+ }
+}
+
+
+bool CodeTree::operator== (const CodeTree& b) const
+{
+ if(GetOp() != b.GetOp()) return false;
+ if(IsImmed()) if(GetImmed() != b.GetImmed()) return false;
+ if(IsVar()) if(GetVar() != b.GetVar()) return false;
+ if(data->IsFunc())
+ if(data->GetFuncNo() != b.data->GetFuncNo()) return false;
+ return data->args == b.data->args;
+}
+
+bool CodeTree::operator< (const CodeTree& b) const
+{
+ if(GetArgCount() != b.GetArgCount())
+ return GetArgCount() > b.GetArgCount();
+
+ if(GetOp() != b.GetOp())
+ {
+ // sort immeds last
+ if(IsImmed() != b.IsImmed()) return IsImmed() < b.IsImmed();
+
+ return GetOp() < b.GetOp();
+ }
+
+ if(IsImmed())
+ {
+ if(GetImmed() != b.GetImmed()) return GetImmed() < b.GetImmed();
+ }
+ if(IsVar() && GetVar() != b.GetVar())
+ {
+ return GetVar() < b.GetVar();
+ }
+ if(data->IsFunc() && data->GetFuncNo() != b.data->GetFuncNo())
+ {
+ return data->GetFuncNo() < b.data->GetFuncNo();
+ }
+
+ pcit i = GetBegin(), j = b.GetBegin();
+ for(; i != GetEnd(); ++i, ++j)
+ {
+ const SubTree &pa = *i, &pb = *j;
+
+ if(!(pa == pb))
+ return pa < pb;
+ }
+ return false;
+}
+
+
+bool IsNegate(const SubTree &p1, const SubTree &p2) /*const */
+{
+ if(p1->IsImmed() && p2->IsImmed())
+ {
+ return p1->GetImmed() == -p2->GetImmed();
+ }
+ if(p1.getsign() == p2.getsign()) return false;
+ return *p1 == *p2;
+}
+bool IsInverse(const SubTree &p1, const SubTree &p2) /*const*/
+{
+ if(p1->IsImmed() && p2->IsImmed())
+ {
+ // FIXME: potential divide by zero.
+ return p1->GetImmed() == 1.0 / p2->GetImmed();
+ }
+ if(p1.getsign() == p2.getsign()) return false;
+ return *p1 == *p2;
+}
+
+SubTree::SubTree() : tree(new CodeTree), sign(false)
+{
+}
+
+SubTree::SubTree(const SubTree &b) : tree(new CodeTree(*b.tree)), sign(b.sign)
+{
+}
+
+#define SubTreeDecl(p1, p2) \
+ SubTree::SubTree p1 : tree(new CodeTree p2), sign(false) { }
+
+SubTreeDecl( (const CodeTree &b), (b) )
+SubTreeDecl( (double value), (value) )
+
+#undef SubTreeDecl
+
+SubTree::~SubTree()
+{
+ delete tree; tree=0;
+}
+
+const SubTree &SubTree::operator= (const SubTree &b)
+{
+ sign = b.sign;
+ CodeTree *oldtree = tree;
+ tree = new CodeTree(*b.tree);
+ delete oldtree;
+ return *this;
+}
+const SubTree &SubTree::operator= (const CodeTree &b)
+{
+ sign = false;
+ CodeTree *oldtree = tree;
+ tree = new CodeTree(b);
+ delete oldtree;
+ return *this;
+}
+
+bool SubTree::operator< (const SubTree& b) const
+{
+ if(getsign() != b.getsign()) return getsign() < b.getsign();
+ return *tree < *b.tree;
+}
+bool SubTree::operator== (const SubTree& b) const
+{
+ return sign == b.sign && *tree == *b.tree;
+}
+void SubTree::Negate() // Note: Parent must be cAdd
+{
+ flipsign();
+ CheckConstNeg();
+}
+void SubTree::CheckConstNeg()
+{
+ if(tree->IsImmed() && getsign())
+ {
+ tree->NegateImmed();
+ sign = false;
+ }
+}
+void SubTree::Invert() // Note: Parent must be cMul
+{
+ flipsign();
+ CheckConstInv();
+}
+void SubTree::CheckConstInv()
+{
+ if(tree->IsImmed() && getsign())
+ {
+ tree->InvertImmed();
+ sign = false;
+ }
+}
+
+}//namespace
+
+void FunctionParser::MakeTree(void *r) const
+{
+ // Dirty hack. Should be fixed.
+ CodeTree* result = static_cast<CodeTree*>(r);
+
+ vector<CodeTree> stack(1);
+
+ #define GROW(n) do { \
+ stacktop += n; \
+ if(stack.size() <= stacktop) stack.resize(stacktop+1); \
+ } while(0)
+
+ #define EAT(n, opcode) do { \
+ unsigned newstacktop = stacktop-n; \
+ if((n) == 0) GROW(1); \
+ stack[stacktop].SetOp((opcode)); \
+ for(unsigned a=0, b=(n); a<b; ++a) \
+ stack[stacktop].AddParam(stack[newstacktop+a]); \
+ stack.erase(stack.begin() + newstacktop, \
+ stack.begin() + stacktop); \
+ stacktop = newstacktop; GROW(1); \
+ } while(0)
+
+ #define ADDCONST(n) do { \
+ stack[stacktop].SetImmed((n)); \
+ GROW(1); \
+ } while(0)
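+    // The old bytecode is replayed on a stack of partial trees: GROW
+    // pushes an empty slot, EAT(n, op) pops n operands into a new
+    // n-ary node.  e.g. the RPN sequence  x y cSub  builds the tree
+    // cAdd{ x, -y }  via the cSub case below.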
+
+ unsigned stacktop=0;
+
+ list<unsigned> labels;
+
+ const unsigned* const ByteCode = data->ByteCode;
+ const unsigned ByteCodeSize = data->ByteCodeSize;
+ const double* const Immed = data->Immed;
+
+ for(unsigned IP=0, DP=0; ; ++IP)
+ {
+ while(labels.size() > 0
+ && *labels.begin() == IP)
+ {
+ // The "else" of an "if" ends here
+ EAT(3, cIf);
+ labels.erase(labels.begin());
+ }
+
+ if(IP >= ByteCodeSize)
+ {
+ break;
+ }
+
+ unsigned opcode = ByteCode[IP];
+
+ if(opcode == cIf)
+ {
+ IP += 2;
+ }
+ else if(opcode == cJump)
+ {
+ labels.push_front(ByteCode[IP+1]+1);
+ IP += 2;
+ }
+ else if(opcode == cImmed)
+ {
+ ADDCONST(Immed[DP++]);
+ }
+ else if(opcode < VarBegin)
+ {
+ switch(opcode)
+ {
+ // Unary operators
+ case cNeg:
+ {
+ EAT(1, cAdd); // Unary minus is negative adding.
+ stack[stacktop-1].getp0().Negate();
+ break;
+ }
+ // Binary operators
+ case cSub:
+ {
+ EAT(2, cAdd); // Minus is negative adding
+ stack[stacktop-1].getp1().Negate();
+ break;
+ }
+ case cDiv:
+ {
+ EAT(2, cMul); // Divide is inverse multiply
+ stack[stacktop-1].getp1().Invert();
+ break;
+ }
+
+ // ADD ALL TWO PARAMETER NON-FUNCTIONS HERE
+ case cAdd: case cMul:
+ case cMod: case cPow:
+ case cEqual: case cLess: case cGreater:
+ case cNEqual: case cLessOrEq: case cGreaterOrEq:
+ case cAnd: case cOr:
+ EAT(2, opcode);
+ break;
+
+ // ADD ALL UNARY NON-FUNCTIONS HERE
+ case cNot:
+ EAT(1, opcode);
+ break;
+
+ case cFCall:
+ {
+ unsigned index = ByteCode[++IP];
+ unsigned params = data->FuncPtrs[index].params;
+ EAT(params, opcode);
+ stack[stacktop-1].data->SetFuncNo(index);
+ break;
+ }
+ case cPCall:
+ {
+ unsigned index = ByteCode[++IP];
+ unsigned params =
+ data->FuncParsers[index]->data->varAmount;
+ EAT(params, opcode);
+ stack[stacktop-1].data->SetFuncNo(index);
+ break;
+ }
+
+ // Converted to cMul on fly
+ case cDeg:
+ ADDCONST(CONSTANT_DR);
+ EAT(2, cMul);
+ break;
+
+ // Converted to cMul on fly
+ case cRad:
+ ADDCONST(CONSTANT_RD);
+ EAT(2, cMul);
+ break;
+
+ // Functions
+ default:
+ {
+ //assert(opcode >= cAbs);
+ unsigned funcno = opcode-cAbs;
+ assert(funcno < sizeof(Functions)/sizeof(Functions[0]));
+ const FuncDefinition& func = Functions[funcno];
+
+ //const FuncDefinition& func = Functions[opcode-cAbs];
+
+ unsigned paramcount = func.params;
+#ifndef DISABLE_EVAL
+ if(opcode == cEval) paramcount = data->varAmount;
+#endif
+ if(opcode == cSqrt)
+ {
+ // Converted on fly: sqrt(x) = x^0.5
+ opcode = cPow;
+ paramcount = 2;
+ ADDCONST(0.5);
+ }
+ if(opcode == cExp)
+ {
+ // Converted on fly: exp(x) = CONSTANT_E^x
+
+ opcode = cPow;
+ paramcount = 2;
+ // reverse the parameters... kludgey
+ stack[stacktop] = stack[stacktop-1];
+ stack[stacktop-1].SetImmed(CONSTANT_E);
+ GROW(1);
+ }
+ bool do_inv = false;
+ if(opcode == cCot) { do_inv = true; opcode = cTan; }
+ if(opcode == cCsc) { do_inv = true; opcode = cSin; }
+ if(opcode == cSec) { do_inv = true; opcode = cCos; }
+
+ bool do_log10 = false;
+ if(opcode == cLog10)
+ {
+ // Converted on fly: log10(x) = log(x) * CONSTANT_L10I
+ opcode = cLog;
+ do_log10 = true;
+ }
+ EAT(paramcount, opcode);
+ if(do_log10)
+ {
+ ADDCONST(CONSTANT_L10I);
+ EAT(2, cMul);
+ }
+ if(do_inv)
+ {
+ // Unary cMul, inverted. No need for "1.0"
+ EAT(1, cMul);
+ stack[stacktop-1].getp0().Invert();
+ }
+ break;
+ }
+ }
+ }
+ else
+ {
+ stack[stacktop].SetVar(opcode);
+ GROW(1);
+ }
+ }
+
+ if(!stacktop)
+ {
+ // ERROR: Stack does not have any values!
+ return;
+ }
+
+ --stacktop; // Ignore the last element, it is always nop (cAdd).
+
+ if(stacktop > 0)
+ {
+ // ERROR: Stack has too many values!
+ return;
+ }
+
+ // Okay, the tree is now stack[0]
+ *result = stack[0];
+}
+
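+/* Typical client-side use of the optimizer (an illustrative sketch;
+ * the variable names are hypothetical, Parse/Eval are the public
+ * FunctionParser API):
+ *
+ *   FunctionParser fp;
+ *   fp.Parse("x*x + 2*x*x", "x"); // bytecode for the raw expression
+ *   fp.Optimize();                // rebuilt via CodeTree; may fold
+ *                                 // to something like 3 * x^2
+ *   double x = 1.5;
+ *   double v = fp.Eval(&x);       // runs the shortened bytecode
+ */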
+void FunctionParser::Optimize()
+{
+ copyOnWrite();
+
+ CodeTree tree;
+ MakeTree(&tree);
+
+ // Do all sorts of optimizations
+ tree.Optimize();
+ // Last changes before assembly
+ tree.FinalOptimize();
+
+ // Now rebuild from the tree.
+
+ vector<unsigned> byteCode;
+ vector<double> immed;
+
+#if 0
+ byteCode.resize(Comp.ByteCodeSize);
+ for(unsigned a=0; a<Comp.ByteCodeSize; ++a)byteCode[a] = Comp.ByteCode[a];
+
+ immed.resize(Comp.ImmedSize);
+ for(unsigned a=0; a<Comp.ImmedSize; ++a)immed[a] = Comp.Immed[a];
+#else
+ byteCode.clear(); immed.clear();
+ tree.Assemble(byteCode, immed);
+#endif
+
+ delete[] data->ByteCode; data->ByteCode = 0;
+ if((data->ByteCodeSize = byteCode.size()) > 0)
+ {
+ data->ByteCode = new unsigned[data->ByteCodeSize];
+ for(unsigned a=0; a<byteCode.size(); ++a)
+ data->ByteCode[a] = byteCode[a];
+ }
+
+ delete[] data->Immed; data->Immed = 0;
+ if((data->ImmedSize = immed.size()) > 0)
+ {
+ data->Immed = new double[data->ImmedSize];
+ for(unsigned a=0; a<immed.size(); ++a)
+ data->Immed[a] = immed[a];
+ }
+}
+
+
+#endif // #ifdef SUPPORT_OPTIMIZER