// L4Re user-level runtime — utility namespace (extraction fragment;
// the fused leading numbers are doxygen source-line residue).
34namespace L4Re {
namespace Util {
41 Region() noexcept : _start(~0UL), _end(~0UL) {}
42 Region(
l4_addr_t addr) noexcept : _start(addr), _end(addr) {}
// NOTE(review): extraction fragment — the two-argument constructor's
// signature line is missing from this view; only the member-initializer
// list and empty body survived.
44 : _start(start), _end(end) {}
45 l4_addr_t start() const noexcept {
return _start; }
46 l4_addr_t end() const noexcept {
return _end; }
47 unsigned long size() const noexcept {
return end() - start() + 1; }
48 bool invalid() const noexcept {
return _start == ~0UL && _end == ~0UL; }
49 bool operator < (Region
const &o)
const noexcept
50 {
return end() < o.start(); }
51 bool contains(Region
const &o)
const noexcept
52 {
return o.start() >= start() && o.end() <= end(); }
53 bool operator == (Region
const &o)
const noexcept
54 {
return o.start() == start() && o.end() == end(); }
// Region_handler: per-region payload kept in the region map.
// DS  — dataspace (capability) type backing the region; OPS — policy
// class supplying map/free/map_info operations (see uses below).
58template<
typename DS,
typename OPS >
// Byte offset into the backing dataspace at which this region starts.
62 L4Re::Rm::Offset _offs;
// Result type produced by OPS::map().
70 typedef typename OPS::Map_result Map_result;
72 Region_handler() noexcept : _offs(0), _mem(), _flags() {}
// Main constructor: binds a dataspace, a client capability, an offset
// and flags to a region. NOTE(review): extraction fragment — the flags
// parameter line and the constructor body are missing from this view;
// _flags is initialized from a parameter not visible here.
73 Region_handler(Dataspace
const &mem,
l4_cap_idx_t client_cap,
74 L4Re::Rm::Offset offset = 0,
76 : _offs(offset), _mem(mem), _client_cap(client_cap), _flags(flags)
// Accessor declarations (bodies not visible in this extraction):
// memory() — the dataspace backing this region.
79 Dataspace
const &memory() const noexcept
// offset() — byte offset into the dataspace where the region starts.
89 L4Re::Rm::Offset offset() const noexcept
// is_ro() — presumably true when the region flags deny writes; body
// not visible here — confirm against the full header.
94 constexpr bool is_ro() const noexcept
109 Region_handler operator + (
l4_int64_t offset)
const noexcept
111 Region_handler n = *
this; n._offs += offset;
return n;
114 void free(
l4_addr_t start,
unsigned long size)
const noexcept
116 Ops::free(
this, start, size);
119 int map(
l4_addr_t addr, Region
const &r,
bool writable,
120 Map_result *result)
const
122 return Ops::map(
this, addr, r, writable, result);
// map_info() fragment — the signature line is missing from this view;
// the call forwards to the OPS policy with this handler as context.
127 return Ops::map_info(
this, start_addr, end_addr);
// Region_map: address-space bookkeeping over two trees
// (_rm: attached regions, _am: reserved areas — see uses below).
// Hdlr — region handler payload type; Alloc — node allocator template.
133template<
typename Hdlr,
template<
typename T>
class Alloc >
// Payload type stored per region.
156 typedef Hdlr Region_handler;
// Iterator aliases re-exported from the underlying tree type.
158 typedef typename Tree::Iterator Iterator;
159 typedef typename Tree::Const_iterator Const_iterator;
160 typedef typename Tree::Rev_iterator Rev_iterator;
161 typedef typename Tree::Const_rev_iterator Const_rev_iterator;
163 Iterator begin() noexcept {
return _rm.begin(); }
164 Const_iterator begin() const noexcept {
return _rm.begin(); }
165 Iterator end() noexcept {
return _rm.end(); }
166 Const_iterator end() const noexcept {
return _rm.end(); }
168 Iterator area_begin() noexcept {
return _am.begin(); }
169 Const_iterator area_begin() const noexcept {
return _am.begin(); }
170 Iterator area_end() noexcept {
return _am.end(); }
171 Const_iterator area_end() const noexcept {
return _am.end(); }
172 Node area_find(Key_type
const &c)
const noexcept {
return _am.find_node(c); }
174 l4_addr_t min_addr() const noexcept {
return _start; }
175 l4_addr_t max_addr() const noexcept {
return _end; }
// find(): look up the region node covering @p key.
// NOTE(review): extraction fragment — the return statements around the
// lookup and containment check are missing from this view.
180 Node find(Key_type
const &key)
const noexcept
182 Node n = _rm.find_node(key);
// Reject nodes that match the tree ordering without actually
// covering the key.
189 if (!n->first.contains(key))
// lower_bound(): first region node not below @p key (body truncated).
195 Node lower_bound(Key_type
const &key)
const noexcept
197 Node n = _rm.lower_bound_node(key);
// lower_bound_area(): same lookup against the area tree (body truncated).
201 Node lower_bound_area(Key_type
const &key)
const noexcept
203 Node n = _am.lower_bound_node(key);
// attach_area() fragment: reserves [addr, addr+size-1] in the area
// tree (_am). NOTE(review): large parts of the body (error returns,
// loop structure) are missing from this extraction.
208 L4Re::Rm::Flags flags = L4Re::Rm::Flags(0),
// Candidate region for the requested range.
219 c = Region(addr, addr + size - 1);
220 Node r = _am.find_node(c);
// The requested range must lie inside [min_addr(), max_addr()].
227 if (addr < min_addr() || (addr + size - 1) > max_addr())
// Presumably the search path when the caller lets us pick an address —
// confirm against the full header.
229 addr = find_free(addr, max_addr(), size, align, flags);
233 c = Region(addr, addr + size - 1);
234 Node r = _am.find_node(c);
// No room past the colliding area: give up.
238 if (r->first.end() >= max_addr())
// Retry just past the colliding area.
241 addr = r->first.end() + 1;
// Record the reservation with an empty handler carrying only the
// region flags.
244 if (_am.insert(c, Hdlr(
typename Hdlr::Dataspace(), 0, 0, flags.region_flags())).second == 0)
// detach_area(): drop the reservation at @p addr; the handling of
// _am.remove()'s result is truncated in this view.
250 bool detach_area(
l4_addr_t addr)
noexcept
252 if (_am.remove(addr))
// attach() fragment: inserts a region backed by @p hdlr into _rm and
// returns its start address as a pointer. NOTE(review): many body
// lines (flag checks, error returns) are missing from this extraction.
258 void *attach(
void *addr,
unsigned long size, Hdlr
const &hdlr,
259 L4Re::Rm::Flags flags = L4Re::Rm::Flags(0),
// Ask the handler where its backing memory can be mapped.
266 int err = hdlr.map_info(&beg, &end);
// Shift by the handler's dataspace offset; end is inclusive.
271 beg += hdlr.offset();
272 end = beg + size - 1U;
// Exact-address requests must match the handler's constraint
// (conditions preceding these && lines are not visible here).
278 &&
reinterpret_cast<l4_addr_t>(addr) != beg)
284 &&
reinterpret_cast<l4_addr_t>(addr) > beg)
// Presumably: when placing inside an area, clip the search range to
// that area — confirm against the full header.
297 Node r = _am.find_node(Region(beg, beg + size - 1));
301 end = r->first.end();
// Find a free address range for the region.
306 beg = find_free(beg, end, size, align, flags);
312 && _am.find_node(Region(beg, beg + size - 1)))
// Final bounds check before insertion.
315 if (beg < min_addr() || beg + size - 1 > end)
// Insert into the region tree; the failure path is truncated here.
318 if (_rm.insert(Region(beg, beg + size - 1), hdlr).second == 0)
319 return reinterpret_cast<void*
>(beg);
// detach() fragment: removes [a, a+sz-1] from the region tree,
// handling the overlap cases between the detach range (dr) and the
// found region (g). NOTE(review): control-flow glue, the definition
// of g, and the return statements are missing from this extraction.
324 int detach(
void *addr,
unsigned long sz,
unsigned flags,
325 Region *reg, Hdlr *hdlr)
noexcept
// dr = requested detach range (inclusive bounds).
328 Region dr(a, a + sz - 1);
336 Hdlr
const &h = r->second;
// Case: the whole region goes away — free the handler's full span.
347 h_copy.free(0, g.size());
// Case: the detach range covers the head of the region.
359 else if (dr.start() <= g.start())
364 h.free(0, dr.end() + 1 - g.start());
366 unsigned long sz = dr.end() + 1 - g.start();
// Shrink the stored region in place and advance its handler offset
// by the removed head size.
367 Item &cn =
const_cast<Item &
>(*r);
368 cn.first = Region(dr.end() + 1, g.end());
369 cn.second = cn.second + sz;
373 *reg = Region(g.start(), dr.end());
// Case: the detach range covers the tail of the region.
379 else if (dr.end() >= g.end())
385 h.free(dr.start() - g.start(), g.end() + 1 - dr.start());
387 Item &cn =
const_cast<Item &
>(*r);
388 cn.first = Region(g.start(), dr.start() - 1);
392 *reg = Region(dr.start(), g.end());
// Case: the detach range splits the region in the middle.
399 else if (g.contains(dr))
404 h.free(dr.start() - g.start(), dr.size());
// Keep the head part in the existing node...
407 Item &cn =
const_cast<Item &
>(*r);
408 cn.first = Region(g.start(), dr.start()-1);
// ...and re-insert the tail with an adjusted handler offset.
414 err = _rm.insert(Region(dr.end() + 1, g.end()),
415 h + (dr.end() + 1 - g.start())).second;
// find_free(): in-class declaration tail, then the out-of-class
// definition. Scans [addr, end] for a size-byte hole honoring @p align.
// NOTE(review): the loop structure and alignment-stepping lines are
// missing from this extraction.
430 unsigned char align, L4Re::Rm::Flags flags)
const noexcept;
435template<
typename Hdlr,
template<
typename T>
class Alloc >
438 unsigned long size,
unsigned char align, L4Re::Rm::Flags flags)
const noexcept
// Reject the invalid sentinel and out-of-range start addresses.
442 if (addr == ~0UL || addr < min_addr() || addr >= end)
// Guard: the candidate range must fit at or below end.
450 if (addr > 0 && addr - 1 > end - size)
// Probe the candidate range against existing regions.
453 Region c(addr, addr + size - 1);
454 r = _rm.find_node(c);
// A colliding region that leaves no room before end means failure.
460 if (r->first.end() > end - size)
468 else if (r->first.end() > end - size)
@ Detached_ds
Detached data space.
@ Detach_again
Detached data space, more to do.
@ Split_ds
Split data space, and done.
@ Kept_ds
Kept data space.
@ Detach_overlap
Do an unmap of all overlapping regions.
@ Detach_keep
Do not free the detached data space, ignore the F::Detach_free.
Region Key_type
Type of the key values.
Base_type::Node Node
Return type for find.
ITEM_TYPE Item_type
Type for the items stored in the set.
unsigned long l4_addr_t
Address type.
signed long long l4_int64_t
Signed 64bit value.
unsigned long l4_cap_idx_t
Capability selector type.
@ L4_INVALID_CAP
Invalid capability selector.
@ L4_ENOENT
No such entity.
#define L4_INVALID_PTR
Invalid address as pointer type.
#define L4_PAGESHIFT
Size of a page, log2-based.
l4_addr_t l4_round_size(l4_addr_t value, unsigned char bits) L4_NOTHROW
Round value up to the next alignment with bits size.
@ L4_INVALID_ADDR
Invalid address.
Common L4 ABI Data Types.
Region_flags
Region flags (permissions, cacheability, special).
@ Reserved
Region is reserved (blocked)
@ Detach_free
Free the portion of the data space after detach.
@ Caching_mask
Mask of all Rm cache bits.
@ Search_addr
Search for a suitable address range.
@ In_area
Search only in area, or map into area.