Visual Studio now compiles dnn_mnist_advanced, inception and dtest

Fm 2016-06-21 19:15:25 +03:00
parent 943a07cbb9
commit cbb69de299
2 changed files with 54 additions and 35 deletions


@@ -78,6 +78,8 @@ elseif (MSVC OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") # else if using Visu
       message(STATUS "Enabling SSE2 instructions")
       add_definitions(-DDLIB_HAVE_SSE2)
    endif()
+   # DNN module produces long type names for NN definitions - disable this warning for MSVC
+   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4503")
 endif()
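Aside: MSVC warning C4503 ("decorated name length exceeded, name was truncated") fires when a mangled type name grows too long, and dlib network definitions are exactly that kind of type, since every layer is a class template wrapping its sub-network. A rough sketch of the sort of alias involved (a hypothetical toy_net_type loosely modeled on the dnn_mnist example; the layer sizes are illustrative only and not taken from this commit):

    #include <dlib/dnn.h>
    using namespace dlib;

    // Each layer template wraps the one below it, so the full type name of even a
    // small network is a deeply nested template; inception-style networks are far
    // deeper, which is what pushes MSVC past its decorated-name limit (C4503).
    using toy_net_type = loss_multiclass_log<
                             fc<10,
                             relu<fc<84,
                             max_pool<2,2,2,2,relu<con<16,5,5,1,1,
                             input<matrix<unsigned char>>
                             >>>>>>>;

The /wd4503 switch only suppresses that warning for the whole build.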


@@ -1985,9 +1985,59 @@ namespace dlib
 // ----------------------------------------------------------------------------------------
     namespace impl{
         // helper classes for layer concat processing
-        template <template<typename> class... TAG_TYPES>
+        // template <template<typename> class... TAG_TYPES>
+        // struct concat_helper_impl {
+        //     // this specialization will be used only by MSVC
+        //     constexpr static size_t tag_count() {return 0;}
+        //     static void list_tags(std::ostream& out)
+        //     {
+        //     }
+        //     template<typename SUBNET>
+        //     static void resize_out(resizable_tensor&, const SUBNET&, long)
+        //     {
+        //     }
+        //     template<typename SUBNET>
+        //     static void concat(tensor&, const SUBNET&, size_t)
+        //     {
+        //     }
+        //     template<typename SUBNET>
+        //     static void split(const tensor&, SUBNET&, size_t)
+        //     {
+        //     }
+        // };
+        template <template<typename> class TAG_TYPE, template<typename> class... TAG_TYPES>
         struct concat_helper_impl{
+            constexpr static size_t tag_count() {return 1 + concat_helper_impl<TAG_TYPES...>::tag_count();}
+            static void list_tags(std::ostream& out)
+            {
+                out << tag_id<TAG_TYPE>::id << (tag_count() > 1 ? "," : "");
+                concat_helper_impl<TAG_TYPES...>::list_tags(out);
+            }
+            template<typename SUBNET>
+            static void resize_out(resizable_tensor& out, const SUBNET& sub, long sum_k)
+            {
+                auto& t = layer<TAG_TYPE>(sub).get_output();
+                concat_helper_impl<TAG_TYPES...>::resize_out(out, sub, sum_k + t.k());
+            }
+            template<typename SUBNET>
+            static void concat(tensor& out, const SUBNET& sub, size_t k_offset)
+            {
+                auto& t = layer<TAG_TYPE>(sub).get_output();
+                tt::copy_tensor(out, k_offset, t, 0, t.k());
+                k_offset += t.k();
+                concat_helper_impl<TAG_TYPES...>::concat(out, sub, k_offset);
+            }
+            template<typename SUBNET>
+            static void split(const tensor& input, SUBNET& sub, size_t k_offset)
+            {
+                auto& t = layer<TAG_TYPE>(sub).get_gradient_input();
+                tt::copy_tensor(t, 0, input, k_offset, t.k());
+                k_offset += t.k();
+                concat_helper_impl<TAG_TYPES...>::split(input, sub, k_offset);
+            }
         };
         template <template<typename> class TAG_TYPE>
         struct concat_helper_impl<TAG_TYPE>{
@@ -2016,39 +2066,6 @@ namespace dlib
                 tt::copy_tensor(t, 0, input, k_offset, t.k());
             }
         };
-        template <template<typename> class TAG_TYPE, template<typename> class... TAG_TYPES>
-        struct concat_helper_impl<TAG_TYPE, TAG_TYPES...>{
-            constexpr static size_t tag_count() {return 1 + concat_helper_impl<TAG_TYPES...>::tag_count();}
-            static void list_tags(std::ostream& out)
-            {
-                out << tag_id<TAG_TYPE>::id << ",";
-                concat_helper_impl<TAG_TYPES...>::list_tags(out);
-            }
-            template<typename SUBNET>
-            static void resize_out(resizable_tensor& out, const SUBNET& sub, long sum_k)
-            {
-                auto& t = layer<TAG_TYPE>(sub).get_output();
-                concat_helper_impl<TAG_TYPES...>::resize_out(out, sub, sum_k + t.k());
-            }
-            template<typename SUBNET>
-            static void concat(tensor& out, const SUBNET& sub, size_t k_offset)
-            {
-                auto& t = layer<TAG_TYPE>(sub).get_output();
-                tt::copy_tensor(out, k_offset, t, 0, t.k());
-                k_offset += t.k();
-                concat_helper_impl<TAG_TYPES...>::concat(out, sub, k_offset);
-            }
-            template<typename SUBNET>
-            static void split(const tensor& input, SUBNET& sub, size_t k_offset)
-            {
-                auto& t = layer<TAG_TYPE>(sub).get_gradient_input();
-                tt::copy_tensor(t, 0, input, k_offset, t.k());
-                k_offset += t.k();
-                concat_helper_impl<TAG_TYPES...>::split(input, sub, k_offset);
-            }
-        };
     }
     // concat layer
     template<
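The layers.h change is structural rather than behavioral: the empty variadic primary template plus a <TAG_TYPE, TAG_TYPES...> partial specialization (the block removed in the last hunk) is replaced by a primary template that itself requires at least one tag and recurses, leaving the single-tag specialization as the base case, which is evidently the shape the MSVC of the time coped with. A stripped-down, self-contained sketch of that recursion pattern (placeholder tag types and a simple count standing in for dlib's tensor work; all names here are hypothetical):

    #include <cstddef>
    #include <iostream>

    // Placeholder "tag" layer templates standing in for dlib's tag1/tag2/... layers.
    template <typename SUBNET> struct tagA {};
    template <typename SUBNET> struct tagB {};
    template <typename SUBNET> struct tagC {};

    // Primary template: peel off one tag, recurse on the remaining pack.
    // Requiring at least one TAG_TYPE here (instead of specializing an empty
    // variadic primary) mirrors the restructuring the commit applies for MSVC.
    template <template<typename> class TAG_TYPE, template<typename> class... TAG_TYPES>
    struct tag_pack_helper
    {
        constexpr static std::size_t tag_count()
        { return 1 + tag_pack_helper<TAG_TYPES...>::tag_count(); }
    };

    // Base case: a single remaining tag ends the recursion.
    template <template<typename> class TAG_TYPE>
    struct tag_pack_helper<TAG_TYPE>
    {
        constexpr static std::size_t tag_count() { return 1; }
    };

    int main()
    {
        // The pack <tagA, tagB, tagC> unrolls to 1 + 1 + 1 at compile time.
        std::cout << tag_pack_helper<tagA, tagB, tagC>::tag_count() << "\n";  // prints 3
    }

dlib's real concat_helper_impl peels tags the same way but forwards the tensor operations (resize_out, concat, split) to each tagged layer as it recurses, as shown in the second hunk above.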