@@ -459,10 +459,13 @@ __drm_mm_interval_first(const struct drm_mm *mm, u64 start, u64 last);
* but using the internal interval tree to accelerate the search for the
* starting node, and so not safe against removal of elements. It assumes
* that @end is within (or is the upper limit of) the drm_mm allocator.
+ * If [@start, @end] are beyond the range of the drm_mm, the iterator may walk
+ * over the special _unallocated_ &drm_mm.head_node, and may even continue
+ * indefinitely.
*/
#define drm_mm_for_each_node_in_range(node__, mm__, start__, end__) \
for (node__ = __drm_mm_interval_first((mm__), (start__), (end__)-1); \
- node__ && node__->start < (end__); \
+ node__->start < (end__); \
      node__ = list_next_entry(node__, node_list))

void drm_mm_scan_init_with_range(struct drm_mm_scan *scan,
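
A minimal usage sketch for context (not part of the patch): a hypothetical
helper, count_nodes_in_range(), that walks the allocated nodes overlapping
[start, end). It relies on the behaviour documented in the comment above:
the walk stops once node__->start reaches @end, which holds at the special
&drm_mm.head_node as long as [@start, @end] stays within the drm_mm's range,
so the removed NULL check is no longer needed.

#include <drm/drm_mm.h>

/*
 * Hypothetical example, not from the patch: count the allocated nodes that
 * overlap [start, end) in @mm. Safe only while no elements are removed
 * concurrently, and only if [start, end] lies within (or at the upper limit
 * of) the drm_mm allocator, per the documentation above.
 */
static unsigned int count_nodes_in_range(struct drm_mm *mm, u64 start, u64 end)
{
	struct drm_mm_node *node;
	unsigned int count = 0;

	drm_mm_for_each_node_in_range(node, mm, start, end)
		count++;

	return count;
}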