- template <typename PreviousLayer, IndexType OutDims>
- class AffineTransform<PreviousLayer, OutDims, std::enable_if_t<(ceil_to_multiple<IndexType>(PreviousLayer::OutputDimensions, MaxSimdWidth) < 2*64)>> {
- public:
- // Input/output type
- using InputType = typename PreviousLayer::OutputType;
- using OutputType = std::int32_t;
- static_assert(std::is_same<InputType, std::uint8_t>::value, "This layer expects std::uint8_t inputs.");
-
- // Number of input/output dimensions
- static constexpr IndexType InputDimensions =
- PreviousLayer::OutputDimensions;
- static constexpr IndexType OutputDimensions = OutDims;
- static constexpr IndexType PaddedInputDimensions =
- ceil_to_multiple<IndexType>(InputDimensions, MaxSimdWidth);
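- // Illustrative: ceil_to_multiple rounds the input width up to the SIMD
- // granularity, e.g. assuming MaxSimdWidth == 32, an InputDimensions of 30
- // is padded to 32 while 32 stays 32, so every weight row spans a whole
- // number of SIMD registers.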
-
- static_assert(PaddedInputDimensions < 128, "Something went wrong. This specialization should not have been chosen.");
-
-#if defined (USE_SSSE3)
- static constexpr IndexType OutputSimdWidth = SimdWidth / 4;
- static constexpr IndexType InputSimdWidth = SimdWidth;
-#endif
-
- // Size of forward propagation buffer used in this layer
- static constexpr std::size_t SelfBufferSize =
- ceil_to_multiple(OutputDimensions * sizeof(OutputType), CacheLineSize);
-
- // Size of the forward propagation buffer used from the input layer to this layer
- static constexpr std::size_t BufferSize =
- PreviousLayer::BufferSize + SelfBufferSize;
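- // Illustrative arithmetic, assuming CacheLineSize == 64: with
- // OutputDimensions == 32 and a 4-byte OutputType, the raw output is
- // 32 * 4 == 128 bytes, already a multiple of 64, so SelfBufferSize == 128.
- // BufferSize then adds this onto the previous layer's running total.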
-
- // Hash value embedded in the evaluation file
- static constexpr std::uint32_t get_hash_value() {
- std::uint32_t hashValue = 0xCC03DAE4u;
- hashValue += OutputDimensions;
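- // The two XORs below fold in the previous layer's hash rotated right by
- // one bit: (h >> 1) ^ (h << 31) is a 32-bit rotate-right of h by 1.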
- hashValue ^= PreviousLayer::get_hash_value() >> 1;
- hashValue ^= PreviousLayer::get_hash_value() << 31;
- return hashValue;
- }
-
- static IndexType get_weight_index_scrambled(IndexType i)
- {
- return
- (i / 4) % (PaddedInputDimensions / 4) * OutputDimensions * 4 +
- i / PaddedInputDimensions * 4 +
- i % 4;
- }
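- // Illustrative mapping, assuming PaddedInputDimensions == 32 and
- // OutputDimensions == 16: the linear index i == 74 (output row 2, input
- // column 10) becomes (18 % 8) * 64 + 2 * 4 + 2 == 138. The weights end up
- // grouped by blocks of 4 input columns, with all output rows of a block
- // contiguous, which suits SIMD kernels that accumulate four uint8 inputs
- // into each 32-bit lane.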
-
- static IndexType get_weight_index(IndexType i)
- {
-#if defined (USE_SSSE3)
- return get_weight_index_scrambled(i);
-#else
- return i;
-#endif
- }
-
- // Read network parameters
- bool read_parameters(std::istream& stream) {
- if (!previousLayer.read_parameters(stream)) return false;
- for (std::size_t i = 0; i < OutputDimensions; ++i)
- biases[i] = read_little_endian<BiasType>(stream);
- for (std::size_t i = 0; i < OutputDimensions * PaddedInputDimensions; ++i)
- weights[get_weight_index(i)] = read_little_endian<WeightType>(stream);
-
- return !stream.fail();
- }
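- // The scrambling only changes the in-memory index at load time; the
- // serialized layout is identical across builds, and write_parameters below
- // walks the same mapping so the file is emitted in the original order.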
-
- // Write network parameters
- bool write_parameters(std::ostream& stream) const {
- if (!previousLayer.write_parameters(stream)) return false;
- for (std::size_t i = 0; i < OutputDimensions; ++i)
- write_little_endian<BiasType>(stream, biases[i]);
-
- for (std::size_t i = 0; i < OutputDimensions * PaddedInputDimensions; ++i)
- write_little_endian<WeightType>(stream, weights[get_weight_index(i)]);
-
- return !stream.fail();
- }
-
- // Forward propagation
- const OutputType* propagate(
- const TransformedFeatureType* transformedFeatures, char* buffer) const {
- const auto input = previousLayer.propagate(
- transformedFeatures, buffer + SelfBufferSize);
- const auto output = reinterpret_cast<OutputType*>(buffer);