IVGCVSW-6399 Remove deprecated code 22.02 (FullyConnected)

 * Remove the deprecated INetwork::AddFullyConnectedLayer() overloads that took
   weights (and optional biases) as arguments; weights and biases are now
   supplied as separate input layers (a minimal usage sketch is included below).
 * Update the tests and sample that used the deprecated overloads.
 * Remove the corresponding wrapper and tests from pyarmnn.
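
For reference, a minimal sketch of the replacement pattern (weights supplied
via a constant layer connected to the FullyConnected layer's second input
slot), as used in the updated sample and tests; layer names and tensor shapes
are illustrative only:

    using namespace armnn;
    INetworkPtr network = INetwork::Create();

    FullyConnectedDescriptor fcDesc;
    TensorInfo weightsInfo(TensorShape({1, 1}), DataType::Float32, 0.0f, 0, true);
    float weightsData[] = {1.0f}; // Identity
    ConstTensor weights(weightsInfo, weightsData);

    IConnectableLayer* inputLayer          = network->AddInputLayer(0);
    IConnectableLayer* weightsLayer        = network->AddConstantLayer(weights, "Weights");
    IConnectableLayer* fullyConnectedLayer =
            network->AddFullyConnectedLayer(fcDesc, "fully connected");
    IConnectableLayer* outputLayer         = network->AddOutputLayer(0);

    // Input slot 0 takes the data, input slot 1 takes the weights.
    inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
    fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
    weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);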

Signed-off-by: Francis Murtagh <francis.murtagh@arm.com>
Change-Id: Ibc52ac7fa57afd9033eb226bbc24a09e88a7d361
diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp
index 505edf8..173e8d5 100644
--- a/include/armnn/INetwork.hpp
+++ b/include/armnn/INetwork.hpp
@@ -392,18 +392,6 @@
     IConnectableLayer* AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
                                               const char* name = nullptr);
 
-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("This AddFullyConnectedLayer overload is deprecated", "22.05")
-    IConnectableLayer* AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
-                                              const Optional<ConstTensor>& weights,
-                                              const Optional<ConstTensor>& biases,
-                                              const char* name = nullptr);
-
-    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("This AddFullyConnectedLayer overload is deprecated", "22.05")
-    IConnectableLayer* AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
-                                              const ConstTensor& weights,
-                                              const Optional<ConstTensor>& biases,
-                                              const char* name = nullptr);
-
     /// Adds a permute layer to the network.
     /// @param permuteDescriptor - PermuteDescriptor to configure the permute.
     /// @param name - Optional name for the layer.
diff --git a/python/pyarmnn/src/pyarmnn/__init__.py b/python/pyarmnn/src/pyarmnn/__init__.py
index 5f95cb0..b994bb7 100644
--- a/python/pyarmnn/src/pyarmnn/__init__.py
+++ b/python/pyarmnn/src/pyarmnn/__init__.py
@@ -38,7 +38,7 @@
     from ._generated.pyarmnn_deserializer import IDeserializer
 except ImportError as err:
     logger = logging.getLogger(__name__)
-    message = "Your ArmNN library instance does not have an armnn models parser funcionality. "
+    message = "Your ArmNN library instance does not have ArmNN model (.armnn) parser functionality. "
     logger.warning("%s Skipped IDeserializer import.", message)
     logger.debug(str(err))
 
diff --git a/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i b/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i
index 643f99f..eb8dede 100644
--- a/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i
+++ b/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i
@@ -1105,35 +1105,6 @@
 
     %feature("docstring",
         "
-        Adds a Fully Connected layer to the network with input weights and optional bias.
-        Also known as a Linear or Dense layer.
-
-        Args:
-            fullyConnectedDescriptor (FullyConnectedDescriptor): Description of the fully connected layer.
-            weights (ConstTensor): Tensor for the weights data.
-            biases (ConstTensor): Optional tensor for the bias data.
-            name (str): Optional name for the layer.
-
-        Returns:
-            IConnectableLayer: Interface for configuring the layer.
-    ") AddFullyConnectedLayer;
-    armnn::IConnectableLayer* AddFullyConnectedLayer(const armnn::FullyConnectedDescriptor& fullyConnectedDescriptor,
-                                                     const armnn::ConstTensor& weights,
-                                                     armnn::ConstTensor* biases = nullptr,
-                                                     const char* name = nullptr) {
-        ARMNN_NO_DEPRECATE_WARN_BEGIN
-        if (biases) {
-            return $self->AddFullyConnectedLayer(fullyConnectedDescriptor, weights,
-                                                 armnn::Optional<armnn::ConstTensor>(*biases), name);
-        } else {
-            return $self->AddFullyConnectedLayer(fullyConnectedDescriptor, weights,
-                                                 armnn::Optional<armnn::ConstTensor>(), name);
-        }
-        ARMNN_NO_DEPRECATE_WARN_END
-    }
-
-    %feature("docstring",
-        "
         Adds a 2D Transpose Convolution layer to the network.
 
         Args:
diff --git a/python/pyarmnn/test/test_network.py b/python/pyarmnn/test/test_network.py
index e56621a..27ad70b 100644
--- a/python/pyarmnn/test/test_network.py
+++ b/python/pyarmnn/test/test_network.py
@@ -249,35 +249,6 @@
 def test_network_method_exists(method):
     assert getattr(ann.INetwork, method, None)
 
-
-def test_fullyconnected_layer_optional_none():
-    net = ann.INetwork()
-    layer = net.AddFullyConnectedLayer(ann.FullyConnectedDescriptor(),
-                                       ann.ConstTensor())
-
-    assert layer
-
-
-def test_fullyconnected_layer_optional_provided():
-    net = ann.INetwork()
-    layer = net.AddFullyConnectedLayer(ann.FullyConnectedDescriptor(),
-                                       ann.ConstTensor(),
-                                       ann.ConstTensor())
-
-    assert layer
-
-
-def test_fullyconnected_layer_all_args():
-    net = ann.INetwork()
-    layer = net.AddFullyConnectedLayer(ann.FullyConnectedDescriptor(),
-                                       ann.ConstTensor(),
-                                       ann.ConstTensor(),
-                                       'NAME1')
-
-    assert layer
-    assert 'NAME1' == layer.GetName()
-
-
 def test_DepthwiseConvolution2d_layer_optional_none():
     net = ann.INetwork()
     layer = net.AddDepthwiseConvolution2dLayer(convolution2dDescriptor=ann.DepthwiseConvolution2dDescriptor(),
diff --git a/samples/CustomMemoryAllocatorSample.cpp b/samples/CustomMemoryAllocatorSample.cpp
index a1b05d4..da249e0 100644
--- a/samples/CustomMemoryAllocatorSample.cpp
+++ b/samples/CustomMemoryAllocatorSample.cpp
@@ -71,26 +71,27 @@
 
     // Turn on logging to standard output
     // This is useful in this sample so that users can learn more about what is going on
-    armnn::ConfigureLogging(true, false, LogSeverity::Info);
+    ConfigureLogging(true, false, LogSeverity::Info);
 
     // Construct ArmNN network
-    armnn::NetworkId networkIdentifier;
-    INetworkPtr myNetwork = INetwork::Create();
-    armnn::FullyConnectedDescriptor fullyConnectedDesc;
+    NetworkId networkIdentifier;
+    INetworkPtr network = INetwork::Create();
+    FullyConnectedDescriptor fullyConnectedDesc;
     float weightsData[] = {1.0f}; // Identity
     TensorInfo weightsInfo(TensorShape({1, 1}), DataType::Float32, 0.0f, 0, true);
     weightsInfo.SetConstant(true);
-    armnn::ConstTensor weights(weightsInfo, weightsData);
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
-    IConnectableLayer *fullyConnected = myNetwork->AddFullyConnectedLayer(fullyConnectedDesc,
-                                                                          weights,
-                                                                          EmptyOptional(),
-                                                                          "fully connected");
-    ARMNN_NO_DEPRECATE_WARN_END
-    IConnectableLayer *InputLayer = myNetwork->AddInputLayer(0);
-    IConnectableLayer *OutputLayer = myNetwork->AddOutputLayer(0);
-    InputLayer->GetOutputSlot(0).Connect(fullyConnected->GetInputSlot(0));
-    fullyConnected->GetOutputSlot(0).Connect(OutputLayer->GetInputSlot(0));
+    ConstTensor weights(weightsInfo, weightsData);
+
+    IConnectableLayer* inputLayer   = network->AddInputLayer(0);
+    IConnectableLayer* weightsLayer = network->AddConstantLayer(weights, "Weights");
+    IConnectableLayer* fullyConnectedLayer =
+            network->AddFullyConnectedLayer(fullyConnectedDesc, "fully connected");
+    IConnectableLayer* outputLayer  = network->AddOutputLayer(0);
+
+    inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
+    weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
+    fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+    weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
 
     // Create ArmNN runtime:
     //
@@ -111,19 +112,19 @@
 
     //Set the tensors in the network.
     TensorInfo inputTensorInfo(TensorShape({1, 1}), DataType::Float32);
-    InputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
 
     unsigned int numElements = inputTensorInfo.GetNumElements();
     size_t totalBytes = numElements * sizeof(float);
 
     TensorInfo outputTensorInfo(TensorShape({1, 1}), DataType::Float32);
-    fullyConnected->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+    fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
 
     // Optimise ArmNN network
     OptimizerOptions optOptions;
     optOptions.m_ImportEnabled = true;
-    armnn::IOptimizedNetworkPtr optNet =
-                Optimize(*myNetwork, {"GpuAcc"}, runtime->GetDeviceSpec(), optOptions);
+    IOptimizedNetworkPtr optNet =
+                Optimize(*network, {"GpuAcc"}, runtime->GetDeviceSpec(), optOptions);
     if (!optNet)
     {
         // This shouldn't happen for this simple sample, with GpuAcc backend.
@@ -154,13 +155,13 @@
 
     inputTensorInfo = runtime->GetInputTensorInfo(networkIdentifier, 0);
     inputTensorInfo.SetConstant(true);
-    armnn::InputTensors inputTensors
+    InputTensors inputTensors
     {
-        {0, armnn::ConstTensor(inputTensorInfo, alignedInputPtr)},
+        {0, ConstTensor(inputTensorInfo, alignedInputPtr)},
     };
-    armnn::OutputTensors outputTensors
+    OutputTensors outputTensors
     {
-        {0, armnn::Tensor(runtime->GetOutputTensorInfo(networkIdentifier, 0), alignedOutputPtr)}
+        {0, Tensor(runtime->GetOutputTensorInfo(networkIdentifier, 0), alignedOutputPtr)}
     };
 
     // Execute network
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 8ec8b42..408003e 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -171,25 +171,6 @@
     return pNetworkImpl->AddFullyConnectedLayer(fullyConnectedDescriptor, name);
 }
 
-IConnectableLayer* INetwork::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
-                                                    const ConstTensor& weights,
-                                                    const Optional<ConstTensor>& biases,
-                                                    const char* name)
-{
-    return pNetworkImpl->AddFullyConnectedLayer(fullyConnectedDescriptor,
-                                                armnn::Optional<ConstTensor>(weights),
-                                                biases,
-                                                name);
-}
-
-IConnectableLayer* INetwork::AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor,
-                                                    const Optional<ConstTensor>& weights,
-                                                    const Optional<ConstTensor>& biases,
-                                                    const char* name)
-{
-    return pNetworkImpl->AddFullyConnectedLayer(fullyConnectedDescriptor, weights, biases, name);
-}
-
 IConnectableLayer* INetwork::AddPermuteLayer(const PermuteDescriptor& permuteDescriptor,
                                              const char* name)
 {
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index 632a80a..966dc6c 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -927,22 +927,21 @@
 
     armnn::INetworkPtr network = armnn::INetwork::Create();
     armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
-
-    // Old way of handling constant tensors.
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
+    armnn::IConnectableLayer* const weightsInputLayer = network->AddInputLayer(1);
+    armnn::IConnectableLayer* const biasInputLayer = network->AddInputLayer(2);
     armnn::IConnectableLayer* const fullyConnectedLayer =
-        network->AddFullyConnectedLayer(descriptor,
-                                        weights,
-                                        armnn::Optional<armnn::ConstTensor>(biases),
-                                        layerName.c_str());
-    ARMNN_NO_DEPRECATE_WARN_END
-
+            network->AddFullyConnectedLayer(descriptor,
+                                            layerName.c_str());
     armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
 
     inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
+    weightsInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
+    biasInputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(2));
     fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
 
     inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
+    weightsInputLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
+    biasInputLayer->GetOutputSlot(0).SetTensorInfo(biasesInfo);
     fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
 
     armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
diff --git a/src/backends/cl/test/ClCustomAllocatorTests.cpp b/src/backends/cl/test/ClCustomAllocatorTests.cpp
index c09d0b2..139e688 100644
--- a/src/backends/cl/test/ClCustomAllocatorTests.cpp
+++ b/src/backends/cl/test/ClCustomAllocatorTests.cpp
@@ -40,7 +40,6 @@
         }
         size_t space = size + alignment + alignment;
         auto allocatedMemPtr = std::malloc(space * sizeof(size_t));
-
         if (std::align(alignment, size, allocatedMemPtr, space) == nullptr)
         {
             throw armnn::Exception("SampleClBackendCustomAllocator::Alignment failed");
@@ -63,7 +62,6 @@
 armnn::INetworkPtr CreateTestNetwork(armnn::TensorInfo& inputTensorInfo)
 {
     using namespace armnn;
-    INetworkPtr myNetwork = INetwork::Create();
 
     armnn::FullyConnectedDescriptor fullyConnectedDesc;
     float weightsData[] = {1.0f}; // Identity
@@ -71,25 +69,27 @@
     weightsInfo.SetConstant(true);
     armnn::ConstTensor weights(weightsInfo, weightsData);
 
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
-    IConnectableLayer* fullyConnected = myNetwork->AddFullyConnectedLayer(fullyConnectedDesc,
-                                                                          weights,
-                                                                          EmptyOptional(),
-                                                                          "fully connected");
-    ARMNN_NO_DEPRECATE_WARN_END
-    IConnectableLayer* InputLayer = myNetwork->AddInputLayer(0);
-    IConnectableLayer* OutputLayer = myNetwork->AddOutputLayer(0);
-    InputLayer->GetOutputSlot(0).Connect(fullyConnected->GetInputSlot(0));
-    fullyConnected->GetOutputSlot(0).Connect(OutputLayer->GetInputSlot(0));
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+    armnn::IConnectableLayer* const weightsLayer = network->AddConstantLayer(weights, "Weights");
+    armnn::IConnectableLayer* const fullyConnectedLayer =
+        network->AddFullyConnectedLayer(fullyConnectedDesc, "fully connected");
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+
+    inputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0));
+    weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
+    fullyConnectedLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+    weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsInfo);
 
     //Set the tensors in the network.
 
-    InputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
 
     TensorInfo outputTensorInfo(TensorShape({1, 1}), DataType::Float32);
-    fullyConnected->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+    fullyConnectedLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
 
-    return myNetwork;
+    return network;
 }
 
 TEST_SUITE("ClCustomAllocatorTests")
@@ -216,4 +216,4 @@
                             "Expected exception in RuntimeImpl::RuntimeImpl() as allocator was nullptr.");
 }
 
-} // test suite ClCustomAllocatorTests
\ No newline at end of file
+} // test suite ClCustomAllocatorTests