Stop using new[] on llvm::BumpPtrAllocator.

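BumpPtrAllocator hands out raw memory and never runs constructors or
destructors, and an array new-expression may add unspecified per-allocation
overhead (an array cookie), so the pointer it yields need not match what the
allocator actually returned. Grab raw storage with Allocate<T>(N) instead and
fill or construct it explicitly. A minimal sketch of the pattern; the names
below are illustrative, not part of this patch:

    #include "llvm/Support/Allocator.h"
    #include <cstring>
    #include <new>

    void example(llvm::BumpPtrAllocator &Allocator,
                 const char *Word, size_t Len, unsigned NumArgs) {
      // Trivially-copyable data: raw storage plus memcpy is enough.
      char *Buf = Allocator.Allocate<char>(Len + 1);
      memcpy(Buf, Word, Len + 1);

      // Elements with constructors: get raw storage, then construct in place.
      struct Arg { Arg() {} };
      Arg *Args = new (Allocator.Allocate<Arg>(NumArgs)) Arg[NumArgs];
      (void)Buf; (void)Args;
    }

For copying a range of non-trivial elements into such storage, use
std::uninitialized_copy rather than std::copy, since the destination is
uninitialized memory.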

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@159833 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/include/clang/AST/CommentLexer.h b/include/clang/AST/CommentLexer.h
index f8dfd27..6683788 100644
--- a/include/clang/AST/CommentLexer.h
+++ b/include/clang/AST/CommentLexer.h
@@ -479,7 +479,7 @@
       return false;
     }
 
-    char *TextPtr = new (Allocator) char[Length + 1];
+    char *TextPtr = Allocator.Allocate<char>(Length + 1);
 
     memcpy(TextPtr, WordText.c_str(), Length + 1);
     StringRef Text = StringRef(TextPtr, Length);
@@ -525,7 +525,7 @@
     }
 
     const unsigned Length = WordText.size();
-    char *TextPtr = new (Allocator) char[Length + 1];
+    char *TextPtr = Allocator.Allocate<char>(Length + 1);
 
     memcpy(TextPtr, WordText.c_str(), Length + 1);
     StringRef Text = StringRef(TextPtr, Length);
diff --git a/include/clang/AST/CommentParser.h b/include/clang/AST/CommentParser.h
index 53c5866..e75d797 100644
--- a/include/clang/AST/CommentParser.h
+++ b/include/clang/AST/CommentParser.h
@@ -34,8 +34,8 @@
   ArrayRef<T> copyArray(ArrayRef<T> Source) {
     size_t Size = Source.size();
     if (Size != 0) {
-      T *Mem = new (Allocator) T[Size];
-      std::copy(Source.begin(), Source.end(), Mem);
+      T *Mem = Allocator.Allocate<T>(Size);
+      std::uninitialized_copy(Source.begin(), Source.end(), Mem);
       return llvm::makeArrayRef(Mem, Size);
     } else
       return llvm::makeArrayRef(static_cast<T *>(NULL), 0);
diff --git a/lib/AST/CommentParser.cpp b/lib/AST/CommentParser.cpp
index 75eae46..14a2d85 100644
--- a/lib/AST/CommentParser.cpp
+++ b/lib/AST/CommentParser.cpp
@@ -47,7 +47,8 @@
     TextTokenRetokenizer &Retokenizer,
     unsigned NumArgs) {
   typedef BlockCommandComment::Argument Argument;
-  Argument *Args = new (Allocator) Argument[NumArgs];
+  Argument *Args =
+      new (Allocator.Allocate<Argument>(NumArgs)) Argument[NumArgs];
   unsigned ParsedArgs = 0;
   Token Arg;
   while (ParsedArgs < NumArgs && Retokenizer.lexWord(Arg)) {