// Declarations of the type-dispatch helpers: each one tries the supported native
// HDF5 types in turn and returns true if the dataset / attribute could be read as
// one of them.
template<typename T> bool hdf5_read_scalar_data_helper(T & value, data_type const & data_id, type_type const & native_id);
template<typename T> bool hdf5_read_scalar_attribute_helper(T & value, attribute_type const & attribute_id, type_type const & native_id);
template<typename T> bool hdf5_read_vector_data_helper(T * value, data_type const & data_id, type_type const & native_id,
                                                       std::vector<std::size_t> const & chunk,
                                                       std::vector<std::size_t> const & offset,
                                                       std::vector<std::size_t> const & data_size);
template<typename T> bool hdf5_read_vector_attribute_helper(std::string const & path, T * value, attribute_type const & attribute_id, type_type const & native_id,
                                                            std::vector<std::size_t> const & chunk,
                                                            std::vector<std::size_t> const & data_size);
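
// --- Illustrative sketch (not part of the original file) -------------------------------
// A minimal version of the dispatch idea behind the helpers above, written against the
// plain HDF5 C API (hid_t handles) rather than the detail:: RAII wrappers. The function
// name and the static_cast conversion are assumptions made for this sketch.
#include <hdf5.h>

template<typename T, typename U>
bool try_read_scalar_as(T & value, hid_t data_id, hid_t native_id, hid_t candidate_id) {
    // H5Tequal returns a positive value when the two datatypes are identical.
    if (H5Tequal(native_id, candidate_id) > 0) {
        U buffer;
        if (H5Dread(data_id, candidate_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, &buffer) < 0)
            return false;
        value = static_cast<T>(buffer);   // the real code converts through cast<T>(...)
        return true;
    }
    return false;   // wrong type: the caller tries the next candidate
}
// A caller would chain the candidates, e.g.
//   try_read_scalar_as<double, int>(v, data_id, native_id, H5T_NATIVE_INT)
//       || try_read_scalar_as<double, double>(v, data_id, native_id, H5T_NATIVE_DOUBLE);
// ----------------------------------------------------------------------------------------
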
// Body of the scalar archive::read(std::string path, T & value): a path without '@'
// refers to a dataset, a path containing '@' to an attribute.
if ((path = complete_path(path)).find_last_of('@') == std::string::npos) {
    // ...
    detail::data_type data_id(H5Dopen2(context_->file_id_, path.c_str(), H5P_DEFAULT));
    detail::type_type type_id(H5Dget_type(data_id));
    detail::type_type native_id(H5Tget_native_type(type_id, H5T_DIR_ASCEND));
    if (H5Tget_class(native_id) == H5T_STRING && !detail::check_error(H5Tis_variable_str(type_id))) {
        // fixed-length string: the size is known from the file datatype
        std::string raw(H5Tget_size(type_id) + 1, '\0');
        detail::check_error(H5Dread(data_id, native_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, &raw[0]));
        value = cast< T >(raw);
    } else if (H5Tget_class(native_id) == H5T_STRING) {
        // variable-length string: HDF5 allocates the buffer, which must be reclaimed
        char * raw;
        detail::check_error(H5Dread(data_id, native_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, &raw));
        value = cast< T >(std::string(raw));
        detail::check_error(H5Dvlen_reclaim(type_id, detail::space_type(H5Dget_space(data_id)), H5P_DEFAULT, &raw));
    } else if (detail::hdf5_read_scalar_data_helper(value, data_id, native_id)) {
        // ...
} else {
    // attribute branch
    if (!is_attribute(path))
        throw path_not_found("the path does not exist: " + path + ALPS_STACKTRACE);
    detail::attribute_type attribute_id(H5Aopen_by_name(
          context_->file_id_
        , path.substr(0, path.find_last_of('@')).c_str()
        , path.substr(path.find_last_of('@') + 1).c_str()
        , H5P_DEFAULT, H5P_DEFAULT
    ));
    detail::type_type type_id(H5Aget_type(attribute_id));
    detail::type_type native_id(H5Tget_native_type(type_id, H5T_DIR_ASCEND));
    if (H5Tget_class(native_id) == H5T_STRING && !detail::check_error(H5Tis_variable_str(type_id))) {
        std::string raw(H5Tget_size(type_id) + 1, '\0');
        detail::check_error(H5Aread(attribute_id, native_id, &raw[0]));
        value = cast< T >(raw);
    } else if (H5Tget_class(native_id) == H5T_STRING) {
        char * raw;
        detail::check_error(H5Aread(attribute_id, native_id, &raw));
        value = cast< T >(std::string(raw));
    } else if (detail::hdf5_read_scalar_attribute_helper(value, attribute_id, native_id)) {
        // ...
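
// --- Usage sketch (not part of the original file) ---------------------------------------
// The scalar overload above backs the public archive interface. The file name and dataset
// paths below are assumptions for illustration; the operator[] / operator>> syntax is the
// usual convenience wrapper around archive::read(path, value).
#include <alps/hdf5/archive.hpp>
#include <string>

inline void read_scalar_usage_example() {
    alps::hdf5::archive ar("results.h5", "r");           // open read-only
    double energy = 0.;
    ar["/simulation/results/energy/mean"] >> energy;     // numeric scalar
    std::string name;
    ar["/simulation/name"] >> name;                      // string dataset, handled above
}
// -----------------------------------------------------------------------------------------
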
// Explicit instantiations of the scalar read for all native types.
#define ALPS_HDF5_READ_SCALAR(T) template void archive::read<T>(std::string, T &) const;
// ...

template<typename T> auto archive::read(std::string path, T * value, std::vector<std::size_t> chunk, std::vector<std::size_t> offset) const -> ONLY_NATIVE(T, void) {
    // ...
    std::vector<std::size_t> data_size = extent(path);
    if (offset.size() == 0)
        offset = std::vector<std::size_t>(dimensions(path), 0);
    if (data_size.size() != chunk.size() || data_size.size() != offset.size())
        throw archive_error("wrong size or offset passed for path: " + path + ALPS_STACKTRACE);
    for (std::size_t i = 0; i < data_size.size(); ++i)
        if (data_size[i] < chunk[i] + offset[i])
            throw archive_error("passed size or offset exceeds the data size for path: " + path + ALPS_STACKTRACE);
    // ...
    for (std::size_t i = 0; i < data_size.size(); ++i)
        // ... (zero-size guard elided)
            throw archive_error("size is zero in one dimension in path: " + path + ALPS_STACKTRACE);
    if ((path = complete_path(path)).find_last_of('@') == std::string::npos) {
        // ... (existence check elided)
            throw path_not_found("the path does not exist: " + path + ALPS_STACKTRACE);
        // ... (scalar check elided)
            throw archive_error("scalar - vector conflict in path: " + path + ALPS_STACKTRACE);
        detail::data_type data_id(H5Dopen2(context_->file_id_, path.c_str(), H5P_DEFAULT));
        detail::type_type type_id(H5Dget_type(data_id));
        detail::type_type native_id(H5Tget_native_type(type_id, H5T_DIR_ASCEND));
        if (H5Tget_class(native_id) == H5T_STRING && !detail::check_error(H5Tis_variable_str(type_id)))
            throw std::logic_error("multidimensional dataset of fixed-size string data is not implemented (" + path + ")" + ALPS_STACKTRACE);
        else if (H5Tget_class(native_id) == H5T_STRING) {
            std::size_t len = std::accumulate(chunk.begin(), chunk.end(), std::size_t(1), std::multiplies<std::size_t>());
            std::unique_ptr<char *[]> raw(new char *[len]);
            if (std::equal(chunk.begin(), chunk.end(), data_size.begin())) {
                // the chunk covers the whole dataset: read it in one go
                detail::check_error(H5Dread(data_id, native_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, raw.get()));
                cast(raw.get(), raw.get() + len, value);
                detail::check_error(H5Dvlen_reclaim(type_id, detail::space_type(H5Dget_space(data_id)), H5P_DEFAULT, raw.get()));
            } else {
                // partial read: select a hyperslab in the file and a matching memory dataspace
                std::vector<hsize_t> offset_hid(offset.begin(), offset.end()),
                                     chunk_hid(chunk.begin(), chunk.end());
                detail::space_type space_id(H5Dget_space(data_id));
                detail::check_error(H5Sselect_hyperslab(space_id, H5S_SELECT_SET, &offset_hid.front(), NULL, &chunk_hid.front(), NULL));
                detail::space_type mem_id(H5Screate_simple(static_cast<int>(chunk_hid.size()), &chunk_hid.front(), NULL));
                detail::check_error(H5Dread(data_id, native_id, mem_id, space_id, H5P_DEFAULT, raw.get()));
                cast(raw.get(), raw.get() + len, value);
                detail::check_error(H5Dvlen_reclaim(type_id, mem_id, H5P_DEFAULT, raw.get()));
            }
        } else if (detail::hdf5_read_vector_data_helper(value, data_id, native_id, chunk, offset, data_size)) {
            // ...
    } else {
        // attribute branch
        if (!is_attribute(path))
            throw path_not_found("the path does not exist: " + path + ALPS_STACKTRACE);
        // ... (scalar check elided)
            throw wrong_type("scalar - vector conflict in path: " + path + ALPS_STACKTRACE);
        hid_t parent_id;
        if (is_group(path.substr(0, path.find_last_of('@'))))
            parent_id = detail::check_error(H5Gopen2(context_->file_id_, path.substr(0, path.find_last_of('@')).c_str(), H5P_DEFAULT));
        else if (is_data(path.substr(0, path.find_last_of('@') - 1)))
            parent_id = detail::check_error(H5Dopen2(context_->file_id_, path.substr(0, path.find_last_of('@')).c_str(), H5P_DEFAULT));
        else
            throw path_not_found("unknown path: " + path.substr(0, path.find_last_of('@')) + ALPS_STACKTRACE);
        detail::attribute_type attribute_id(H5Aopen(parent_id, path.substr(path.find_last_of('@') + 1).c_str(), H5P_DEFAULT));
        detail::type_type type_id(H5Aget_type(attribute_id));
        detail::type_type native_id(H5Tget_native_type(type_id, H5T_DIR_ASCEND));
        if (H5Tget_class(native_id) == H5T_STRING && !detail::check_error(H5Tis_variable_str(type_id)))
            throw std::logic_error("multidimensional dataset of fixed-size string data is not implemented (" + path + ")" + ALPS_STACKTRACE);
        else if (H5Tget_class(native_id) == H5T_STRING) {
            std::size_t len = std::accumulate(chunk.begin(), chunk.end(), std::size_t(1), std::multiplies<std::size_t>());
            std::unique_ptr<char *[]> raw(new char *[len]);
            if (std::equal(chunk.begin(), chunk.end(), data_size.begin())) {
                detail::check_error(H5Aread(attribute_id, native_id, raw.get()));
                cast(raw.get(), raw.get() + len, value);
            } else
                throw std::logic_error("non-contiguous multidimensional datasets as attributes are not implemented (" + path + ")" + ALPS_STACKTRACE);
            detail::check_error(H5Dvlen_reclaim(type_id, detail::space_type(H5Aget_space(attribute_id)), H5P_DEFAULT, raw.get()));
        } else if (H5Tget_class(native_id) == H5T_STRING) {
            // ...
            detail::check_error(H5Aread(attribute_id, native_id, raw));
            throw std::logic_error("multidimensional dataset of variable-length string data is not implemented (" + path + ")" + ALPS_STACKTRACE);
        } else if (detail::hdf5_read_vector_attribute_helper(path, value, attribute_id, native_id, chunk, data_size)) {
            // ...
        if (is_group(path.substr(0, path.find_last_of('@'))))
            detail::check_group(parent_id);
        else
            detail::check_data(parent_id);
        // ...
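
// --- Illustrative sketch of the hyperslab path used above (not part of the original file)
// The partial-read branch selects a hyperslab in the file dataspace and a matching memory
// dataspace before calling H5Dread. The same mechanism with the plain HDF5 C API; file
// name, dataset name and the absence of error handling are assumptions made for brevity.
#include <hdf5.h>
#include <vector>

inline std::vector<double> read_block(const char * filename, const char * dataset,
                                      const hsize_t offset[2], const hsize_t chunk[2]) {
    hid_t file_id  = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
    hid_t data_id  = H5Dopen2(file_id, dataset, H5P_DEFAULT);
    hid_t space_id = H5Dget_space(data_id);                       // file dataspace
    // Select the requested block in the file ...
    H5Sselect_hyperslab(space_id, H5S_SELECT_SET, offset, NULL, chunk, NULL);
    // ... and describe an equally shaped buffer in memory.
    hid_t mem_id = H5Screate_simple(2, chunk, NULL);
    std::vector<double> buffer(static_cast<std::size_t>(chunk[0] * chunk[1]));
    H5Dread(data_id, H5T_NATIVE_DOUBLE, mem_id, space_id, H5P_DEFAULT, buffer.data());
    H5Sclose(mem_id); H5Sclose(space_id); H5Dclose(data_id); H5Fclose(file_id);
    return buffer;
}
// -----------------------------------------------------------------------------------------
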
#define ALPS_HDF5_READ_VECTOR(T) template void archive::read<T>(std::string, T *, std::vector<std::size_t>, std::vector<std::size_t>) const;
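
// --- Usage sketch (not part of the original file) ----------------------------------------
// The pointer/chunk/offset overload instantiated above reads a block of a multidimensional
// dataset into a caller-provided buffer. File name and dataset path are assumptions;
// extent() is the shape query used inside the read above.
#include <alps/hdf5/archive.hpp>
#include <cstddef>
#include <functional>
#include <numeric>
#include <vector>

inline void read_vector_usage_example() {
    alps::hdf5::archive ar("results.h5", "r");
    std::vector<std::size_t> chunk  = ar.extent("/lattice/couplings");   // read the full extent ...
    std::vector<std::size_t> offset(chunk.size(), 0);                    // ... starting at the origin
    std::vector<double> buffer(std::accumulate(chunk.begin(), chunk.end(),
                                               std::size_t(1), std::multiplies<std::size_t>()));
    ar.read("/lattice/couplings", buffer.data(), chunk, offset);
}
// ------------------------------------------------------------------------------------------
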