extern uint32_t ntohl(uint32_t);
extern uint16_t ntohs(uint16_t);
#else
extern in_addr_t htonl(in_addr_t);
extern in_port_t htons(in_port_t);
extern in_addr_t ntohl(in_addr_t);
extern in_port_t ntohs(in_port_t);
#endif /* !defined(_XPG4_2) || defined(__EXTENSIONS__) || defined(_XPG5) */
#if !(defined(_XPG4_2) || defined(_XPG5)) || defined(__EXTENSIONS__)
extern uint64_t htonll(uint64_t);
extern uint64_t ntohll(uint64_t);
#endif /* !(_XPG4_2||_XPG5) || __EXTENSIONS__ */
#endif
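
/*
 * Usage sketch (illustrative, not part of the original header): htonll()
 * and ntohll() extend the classic htonl()/ntohl() pairing to 64-bit
 * quantities, e.g. when serializing a 64-bit timestamp into a wire format:
 *
 *    uint64_t stamp = 0x0102030405060708ULL;
 *    uint64_t wire = htonll(stamp);    [big-endian on the wire]
 *    assert(ntohll(wire) == stamp);    [round-trips on any host]
 */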

#if !defined(_XPG4_2) || defined(__EXTENSIONS__)

/*
 * Macros to reverse byte order
 */
#define BSWAP_8(x) ((x) & 0xff)
#if !defined(__i386) && !defined(__amd64)
#define BSWAP_16(x) ((BSWAP_8(x) << 8) | BSWAP_8((x) >> 8))
#define BSWAP_32(x) (((uint32_t)(x) << 24) | \
    (((uint32_t)(x) << 8) & 0xff0000) | \
    (((uint32_t)(x) >> 8) & 0xff00) | \
    ((uint32_t)(x) >> 24))
#else /* x86 */
#define BSWAP_16(x) htons(x)
#define BSWAP_32(x) htonl(x)
#endif /* !__i386 && !__amd64 */

#if (!defined(__i386) && !defined(__amd64)) || \
    ((defined(_XPG4_2) || defined(_XPG5)) && !defined(__EXTENSIONS__))
#define BSWAP_64(x) (((uint64_t)(x) << 56) | \
    (((uint64_t)(x) << 40) & 0xff000000000000ULL) | \
    (((uint64_t)(x) << 24) & 0xff0000000000ULL) | \
    (((uint64_t)(x) << 8) & 0xff00000000ULL) | \
    (((uint64_t)(x) >> 8) & 0xff000000ULL) | \
    (((uint64_t)(x) >> 24) & 0xff0000ULL) | \
    (((uint64_t)(x) >> 40) & 0xff00ULL) | \
    ((uint64_t)(x) >> 56))
#else /* x86 with non-XPG extensions allowed */
#define BSWAP_64(x) htonll(x)
#endif /* (!__i386&&!__amd64) || ((_XPG4_2||_XPG5) && !__EXTENSIONS__) */
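
/*
 * Usage sketch (illustrative, not part of the original header): the
 * BSWAP_* macros unconditionally reverse byte order, regardless of the
 * host's endianness:
 *
 *    uint32_t v = 0x11223344;
 *    uint32_t r = BSWAP_32(v);    [r == 0x44332211 on every host]
 *
 * On x86 they map to htons()/htonl()/htonll(), which typically compile
 * down to a single byte-swap instruction.
 */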

#define BMASK_8(x) ((x) & 0xff)
#define BMASK_16(x) ((x) & 0xffff)
#define BMASK_32(x) ((x) & 0xffffffff)
#define BMASK_64(x) (x)

/*
 * Macros to convert between a specific byte order and native byte order
 */
#ifdef _BIG_ENDIAN
#define BE_8(x) BMASK_8(x)
#define BE_16(x) BMASK_16(x)
#define BE_32(x) BMASK_32(x)
#define BE_64(x) BMASK_64(x)
#define LE_8(x) BSWAP_8(x)
#define LE_16(x) BSWAP_16(x)
#define LE_32(x) BSWAP_32(x)
#define LE_64(x) BSWAP_64(x)
#else
#define LE_8(x) BMASK_8(x)
#define LE_16(x) BMASK_16(x)
#define LE_32(x) BMASK_32(x)
#define LE_64(x) BMASK_64(x)
#define BE_8(x) BSWAP_8(x)
#define BE_16(x) BSWAP_16(x)
#define BE_32(x) BSWAP_32(x)
#define BE_64(x) BSWAP_64(x)
#endif
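
/*
 * Usage sketch (illustrative, not part of the original header): unlike
 * BSWAP_*, the BE_* and LE_* macros convert between a fixed byte order
 * and whatever the native order happens to be, so they reduce to a
 * BMASK_* truncation when host and target order already match:
 *
 *    uint16_t native = 0x1234;
 *    uint16_t wire = BE_16(native);    [0x1234 on SPARC, 0x3412 on x86]
 */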

/*
 * Macros to read unaligned values from a specific byte order to
 * native byte order
 */

#define BE_IN8(xa) \
    *((uint8_t *)(xa))

#if !defined(__i386) && !defined(__amd64)
#define BE_IN16(xa) \
    (((uint16_t)BE_IN8(xa) << 8) | BE_IN8((uint8_t *)(xa) + 1))

#define BE_IN32(xa) \
    (((uint32_t)BE_IN16(xa) << 16) | BE_IN16((uint8_t *)(xa) + 2))

#else /* x86 */
#define BE_IN16(xa) htons(*((uint16_t *)(void *)(xa)))
#define BE_IN32(xa) htonl(*((uint32_t *)(void *)(xa)))
#endif /* !__i386 && !__amd64 */

#if (!defined(__i386) && !defined(__amd64)) || \
    ((defined(_XPG4_2) || defined(_XPG5)) && !defined(__EXTENSIONS__))
#define BE_IN64(xa) \
    (((uint64_t)BE_IN32(xa) << 32) | BE_IN32((uint8_t *)(xa) + 4))
#else /* x86 with non-XPG extensions allowed */
#define BE_IN64(xa) htonll(*((uint64_t *)(void *)(xa)))
#endif /* (!__i386&&!__amd64) || ((_XPG4_2||_XPG5) && !__EXTENSIONS__) */
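
/*
 * Usage sketch (illustrative, not part of the original header): the
 * BE_IN* macros read a big-endian value from a buffer that need not be
 * aligned, e.g. a 32-bit length field at byte offset 2 of a packet:
 *
 *    uint8_t pkt[6] = { 0xaa, 0xbb, 0x00, 0x00, 0x01, 0x00 };
 *    uint32_t len = BE_IN32(pkt + 2);    [len == 256]
 *
 * The byte-at-a-time expansion is safe on strict-alignment machines;
 * the x86 variant dereferences the buffer directly because x86 permits
 * unaligned loads.
 */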

#define LE_IN8(xa) \
    *((uint8_t *)(xa))

#define LE_IN16(xa) \
    (((uint16_t)LE_IN8((uint8_t *)(xa) + 1) << 8) | LE_IN8(xa))

#define LE_IN32(xa) \
    (((uint32_t)LE_IN16((uint8_t *)(xa) + 2) << 16) | LE_IN16(xa))

#define LE_IN64(xa) \
    (((uint64_t)LE_IN32((uint8_t *)(xa) + 4) << 32) | LE_IN32(xa))
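
/*
 * Usage sketch (illustrative, not part of the original header): LE_IN*
 * mirrors BE_IN* for little-endian data, e.g. fields in on-disk formats
 * that are defined as little-endian:
 *
 *    uint8_t hdr[4] = { 0x78, 0x56, 0x34, 0x12 };
 *    uint32_t v = LE_IN32(hdr);    [v == 0x12345678 on every host]
 */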

/*
 * Macros to write unaligned values from native byte order to a specific byte
 * order.
 */
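
/*
 * Caution (not in the original header): each *_OUT macro expands to a
 * sequence of assignment statements rather than a single expression, so
 * an unbraced "if (cond) BE_OUT32(p, v);" would leave the trailing
 * statements unconditional; wrap such uses in braces.
 */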

#define BE_OUT8(xa, yv) *((uint8_t *)(xa)) = (uint8_t)(yv);

#define BE_OUT16(xa, yv) \
    BE_OUT8((uint8_t *)(xa) + 1, yv); \
    BE_OUT8((uint8_t *)(xa), (yv) >> 8);

#define BE_OUT32(xa, yv) \
    BE_OUT16((uint8_t *)(xa) + 2, yv); \
    BE_OUT16((uint8_t *)(xa), (yv) >> 16);

#if (!defined(__i386) && !defined(__amd64)) || \
    ((defined(_XPG4_2) || defined(_XPG5)) && !defined(__EXTENSIONS__))
#define BE_OUT64(xa, yv) \
    BE_OUT32((uint8_t *)(xa) + 4, yv); \
    BE_OUT32((uint8_t *)(xa), (yv) >> 32);
#else /* x86 with non-XPG extensions allowed */
#define BE_OUT64(xa, yv) *((uint64_t *)(void *)(xa)) = htonll((uint64_t)(yv));
#endif /* (!__i386&&!__amd64) || ((_XPG4_2||_XPG5) && !__EXTENSIONS__) */
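
/*
 * Usage sketch (illustrative, not part of the original header): BE_OUT*
 * stores a native value into a possibly unaligned buffer in big-endian
 * order:
 *
 *    uint8_t buf[4];
 *    BE_OUT32(buf, 256);    [buf becomes { 0x00, 0x00, 0x01, 0x00 }]
 */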

#define LE_OUT8(xa, yv) *((uint8_t *)(xa)) = (uint8_t)(yv);

#define LE_OUT16(xa, yv) \
    LE_OUT8((uint8_t *)(xa), yv); \
    LE_OUT8((uint8_t *)(xa) + 1, (yv) >> 8);

#define LE_OUT32(xa, yv) \
    LE_OUT16((uint8_t *)(xa), yv); \
    LE_OUT16((uint8_t *)(xa) + 2, (yv) >> 16);

#define LE_OUT64(xa, yv) \
    LE_OUT32((uint8_t *)(xa), yv); \
    LE_OUT32((uint8_t *)(xa) + 4, (yv) >> 32);
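
/*
 * Usage sketch (illustrative, not part of the original header): LE_OUT*
 * is the little-endian counterpart of BE_OUT*:
 *
 *    uint8_t buf[4];
 *    LE_OUT32(buf, 256);    [buf becomes { 0x00, 0x01, 0x00, 0x00 }]
 */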

#endif /* !defined(_XPG4_2) || defined(__EXTENSIONS__) */

#ifdef __cplusplus
}
#endif