Fix another major performance regression in LiveVariables by canonicalizing the underlying ImmutableSets only at merges rather than on every analyzed statement.  Fixes <rdar://problem/10087538>.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@140958 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/Analysis/LiveVariables.cpp b/lib/Analysis/LiveVariables.cpp
index 055b58c..be6e659 100644
--- a/lib/Analysis/LiveVariables.cpp
+++ b/lib/Analysis/LiveVariables.cpp
@@ -205,7 +205,10 @@
   void dumpBlockLiveness(const SourceManager& M);
 
   LiveVariablesImpl(AnalysisContext &ac, bool KillAtAssign)
-    : analysisContext(ac), killAtAssign(KillAtAssign) {}
+    : analysisContext(ac),
+      SSetFact(false), // Do not canonicalize ImmutableSets by default.
+      DSetFact(false), // This is a *major* performance win.
+      killAtAssign(KillAtAssign) {}
 };
 }
 
@@ -255,6 +258,8 @@
   SSetRefA = mergeSets(SSetRefA, SSetRefB);
   DSetRefA = mergeSets(DSetRefA, DSetRefB);
   
+  // asImmutableSet() canonicalizes the tree, allowing us to do an easy
+  // comparison afterwards.
   return LiveVariables::LivenessValues(SSetRefA.asImmutableSet(),
                                        DSetRefA.asImmutableSet());  
 }
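
A minimal sketch of the pattern this patch relies on, assuming the API in LLVM's llvm/ADT/ImmutableSet.h; the element type and the names F, R, S, and T below are illustrative and not taken from LiveVariables itself. A Factory constructed with canonicalize=false skips the canonical-tree lookup on every add/remove, while asImmutableSet() canonicalizes once at the point where sets will be compared (the merge), so equality checks on the results stay cheap.

#include "llvm/ADT/ImmutableSet.h"

int main() {
  // Passing false mirrors the SSetFact/DSetFact change above: per-element
  // updates no longer canonicalize the underlying AVL tree.
  llvm::ImmutableSet<int>::Factory F(/*canonicalize=*/false);

  // ImmutableSetRef works directly against the tree factory, keeping the
  // per-statement adds cheap.
  llvm::ImmutableSetRef<int> R =
      llvm::ImmutableSetRef<int>::getEmptySet(F.getTreeFactory());
  R = R.add(1);
  R = R.add(2);

  // Canonicalize only when producing the final value (the merge in the
  // patch), so comparisons of structurally identical sets can be resolved
  // quickly afterwards.
  llvm::ImmutableSet<int> S = R.asImmutableSet();
  llvm::ImmutableSet<int> T =
      R.remove(2).add(2).asImmutableSet();  // same contents, built differently

  return (S == T) ? 0 : 1;  // expected: equal, so exit code 0
}

The trade-off is the same one the commit message describes: building the sets becomes cheaper because no canonical tree is computed per operation, and the one-time canonicalization is deferred to the merge, where the dataflow solver actually needs to compare liveness values.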