[drm-intel:for-linux-next-fixes 7/7] drivers/gpu/drm/i915/i915_gem_evict.c:318:31: error: implicit declaration of function 'i915_vma_has_userfault'

kbuild test robot fengguang.wu at intel.com
Mon Oct 9 23:39:00 UTC 2017


tree:   git://anongit.freedesktop.org/drm-intel for-linux-next-fixes
head:   72872c99b6dbc80362965cd30489c849f0663140
commit: 72872c99b6dbc80362965cd30489c849f0663140 [7/7] drm/i915: Check PIN_NONFAULT overlaps in evict_for_node
config: x86_64-randconfig-x018-201741 (attached as .config)
compiler: gcc-6 (Debian 6.2.0-3) 6.2.0 20160901
reproduce:
        git checkout 72872c99b6dbc80362965cd30489c849f0663140
        # save the attached .config to linux build tree
        make ARCH=x86_64 

All errors (new ones prefixed by >>):

   drivers/gpu/drm/i915/i915_gem_evict.c: In function 'i915_gem_evict_for_node':
>> drivers/gpu/drm/i915/i915_gem_evict.c:318:31: error: implicit declaration of function 'i915_vma_has_userfault' [-Werror=implicit-function-declaration]
      if (flags & PIN_NONFAULT && i915_vma_has_userfault(vma)) {
                                  ^~~~~~~~~~~~~~~~~~~~~~
   Cyclomatic Complexity 5 include/linux/compiler.h:__read_once_size
   Cyclomatic Complexity 5 include/linux/compiler.h:__write_once_size
   Cyclomatic Complexity 1 arch/x86/include/asm/bitops.h:variable_test_bit
   Cyclomatic Complexity 1 include/linux/list.h:INIT_LIST_HEAD
   Cyclomatic Complexity 1 include/linux/list.h:__list_add_valid
   Cyclomatic Complexity 1 include/linux/list.h:__list_del_entry_valid
   Cyclomatic Complexity 2 include/linux/list.h:__list_add
   Cyclomatic Complexity 1 include/linux/list.h:list_add
   Cyclomatic Complexity 1 include/linux/list.h:__list_del
   Cyclomatic Complexity 2 include/linux/list.h:__list_del_entry
   Cyclomatic Complexity 1 include/linux/list.h:list_del
   Cyclomatic Complexity 1 include/linux/list.h:list_move
   Cyclomatic Complexity 1 include/linux/list.h:list_empty
   Cyclomatic Complexity 1 include/linux/err.h:PTR_ERR
   Cyclomatic Complexity 1 include/linux/err.h:IS_ERR
   Cyclomatic Complexity 1 arch/x86/include/asm/atomic.h:atomic_read
   Cyclomatic Complexity 1 include/linux/lockdep.h:lock_is_held
   Cyclomatic Complexity 1 include/linux/jump_label.h:static_key_count
   Cyclomatic Complexity 2 include/linux/jump_label.h:static_key_false
   Cyclomatic Complexity 2 include/linux/cpumask.h:cpumask_check
   Cyclomatic Complexity 1 include/linux/cpumask.h:cpumask_test_cpu
   Cyclomatic Complexity 5 arch/x86/include/asm/preempt.h:__preempt_count_add
   Cyclomatic Complexity 1 arch/x86/include/asm/preempt.h:__preempt_count_dec_and_test
   Cyclomatic Complexity 1 include/linux/rcupdate.h:rcu_read_lock_sched_notrace
   Cyclomatic Complexity 2 include/linux/rcupdate.h:rcu_read_unlock_sched_notrace
   Cyclomatic Complexity 2 include/linux/kref.h:kref_put
   Cyclomatic Complexity 1 include/drm/drm_mm.h:drm_mm_node_allocated
   Cyclomatic Complexity 1 include/drm/drm_gem.h:__drm_gem_object_put
   Cyclomatic Complexity 1 include/drm/drm_gem.h:__drm_gem_object_unreference
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_gem_request.h:i915_gem_active_isset
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_gem_object.h:i915_gem_object_put
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_vma.h:i915_vma_get_active
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_vma.h:i915_vma_is_active
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_vma.h:i915_vma_pin_count
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_vma.h:i915_vma_is_pinned
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_vma.h:__i915_vma_pin
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_vma.h:__i915_vma_unpin
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/i915_vma.h:i915_vma_unpin
   Cyclomatic Complexity 6 drivers/gpu/drm/i915/i915_trace.h:trace_i915_gem_evict
   Cyclomatic Complexity 6 drivers/gpu/drm/i915/i915_trace.h:trace_i915_gem_evict_vm
   Cyclomatic Complexity 6 drivers/gpu/drm/i915/i915_trace.h:trace_i915_gem_evict_node
   Cyclomatic Complexity 2 drivers/gpu/drm/i915/i915_drv.h:i915_gem_drain_freed_objects
   Cyclomatic Complexity 4 drivers/gpu/drm/i915/i915_gem_evict.c:ggtt_is_idle
   Cyclomatic Complexity 1 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:mock_color_adjust
   Cyclomatic Complexity 4 drivers/gpu/drm/i915/i915_gem_evict.c:mark_free
   Cyclomatic Complexity 3 drivers/gpu/drm/i915/i915_gem_evict.c:ggtt_flush
   Cyclomatic Complexity 9 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:populate_ggtt
   Cyclomatic Complexity 9 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:cleanup_objects
   Cyclomatic Complexity 5 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:igt_overcommit
   Cyclomatic Complexity 4 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:unpin_ggtt
   Cyclomatic Complexity 34 drivers/gpu/drm/i915/i915_gem_evict.c:i915_gem_evict_something
   Cyclomatic Complexity 4 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:igt_evict_something
   Cyclomatic Complexity 29 drivers/gpu/drm/i915/i915_gem_evict.c:i915_gem_evict_for_node
   Cyclomatic Complexity 7 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:igt_evict_for_cache_color
   Cyclomatic Complexity 4 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:igt_evict_for_vma
   Cyclomatic Complexity 16 drivers/gpu/drm/i915/i915_gem_evict.c:i915_gem_evict_vm
   Cyclomatic Complexity 4 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:igt_evict_vm
   Cyclomatic Complexity 2 drivers/gpu/drm/i915/selftests/i915_gem_evict.c:i915_gem_evict_mock_selftests
   cc1: all warnings being treated as errors

vim +/i915_vma_has_userfault +318 drivers/gpu/drm/i915/i915_gem_evict.c

   236	
   237	/**
   238	 * i915_gem_evict_for_node - Evict vmas to make room for binding a new one
   239	 * @vm: address space to evict from
   240	 * @target: range (and color) to evict for
   241	 * @flags: additional flags to control the eviction algorithm
   242	 *
   243	 * This function will try to evict vmas that overlap the target node.
   244	 *
   245	 * To clarify: This is for freeing up virtual address space, not for freeing
   246	 * memory in e.g. the shrinker.
   247	 */
   248	int i915_gem_evict_for_node(struct i915_address_space *vm,
   249				    struct drm_mm_node *target,
   250				    unsigned int flags)
   251	{
   252		LIST_HEAD(eviction_list);
   253		struct drm_mm_node *node;
   254		u64 start = target->start;
   255		u64 end = start + target->size;
   256		struct i915_vma *vma, *next;
   257		bool check_color;
   258		int ret = 0;
   259	
   260		lockdep_assert_held(&vm->i915->drm.struct_mutex);
   261		GEM_BUG_ON(!IS_ALIGNED(start, I915_GTT_PAGE_SIZE));
   262		GEM_BUG_ON(!IS_ALIGNED(end, I915_GTT_PAGE_SIZE));
   263	
   264		trace_i915_gem_evict_node(vm, target, flags);
   265	
   266		/* Retire before we search the active list. Although we have
   267		 * reasonable accuracy in our retirement lists, we may have
   268		 * a stray pin (preventing eviction) that can only be resolved by
   269		 * retiring.
   270		 */
   271		if (!(flags & PIN_NONBLOCK))
   272			i915_gem_retire_requests(vm->i915);
   273	
   274		check_color = vm->mm.color_adjust;
   275		if (check_color) {
   276			/* Expand search to cover neighbouring guard pages (or lack!) */
   277			if (start)
   278				start -= I915_GTT_PAGE_SIZE;
   279	
   280			/* Always look at the page afterwards to avoid the end-of-GTT */
   281			end += I915_GTT_PAGE_SIZE;
   282		}
   283		GEM_BUG_ON(start >= end);
   284	
   285		drm_mm_for_each_node_in_range(node, &vm->mm, start, end) {
   286			/* If we find any non-objects (!vma), we cannot evict them */
   287			if (node->color == I915_COLOR_UNEVICTABLE) {
   288				ret = -ENOSPC;
   289				break;
   290			}
   291	
   292			GEM_BUG_ON(!node->allocated);
   293			vma = container_of(node, typeof(*vma), node);
   294	
   295			/* If we are using coloring to insert guard pages between
   296			 * different cache domains within the address space, we have
   297			 * to check whether the objects on either side of our range
   298			 * abut and conflict. If they are in conflict, then we evict
   299			 * those as well to make room for our guard pages.
   300			 */
   301			if (check_color) {
   302			if (node->start + node->size == target->start) {
   303					if (node->color == target->color)
   304						continue;
   305				}
   306				if (node->start == target->start + target->size) {
   307					if (node->color == target->color)
   308						continue;
   309				}
   310			}
   311	
   312			if (flags & PIN_NONBLOCK &&
   313			    (i915_vma_is_pinned(vma) || i915_vma_is_active(vma))) {
   314				ret = -ENOSPC;
   315				break;
   316			}
   317	
   318			if (flags & PIN_NONFAULT && i915_vma_has_userfault(vma)) {
   319				ret = -ENOSPC;
   320				break;
   321			}
   322	
   323			/* Overlap of objects in the same batch? */
   324			if (i915_vma_is_pinned(vma)) {
   325				ret = -ENOSPC;
   326				if (vma->exec_flags &&
   327				    *vma->exec_flags & EXEC_OBJECT_PINNED)
   328					ret = -EINVAL;
   329				break;
   330			}
   331	
   332			/* Never show fear in the face of dragons!
   333			 *
   334			 * We cannot directly remove this node from within this
   335			 * iterator and as with i915_gem_evict_something() we employ
   336			 * the vma pin_count in order to prevent the action of
   337			 * unbinding one vma from freeing (by dropping its active
   338			 * reference) another in our eviction list.
   339			 */
   340			__i915_vma_pin(vma);
   341			list_add(&vma->evict_link, &eviction_list);
   342		}
   343	
   344		list_for_each_entry_safe(vma, next, &eviction_list, evict_link) {
   345			__i915_vma_unpin(vma);
   346			if (ret == 0)
   347				ret = i915_vma_unbind(vma);
   348		}
   349	
   350		return ret;
   351	}
   352	

---
0-DAY kernel test infrastructure                Open Source Technology Center
https://lists.01.org/pipermail/kbuild-all                   Intel Corporation
-------------- next part --------------
A non-text attachment was scrubbed...
Name: .config.gz
Type: application/gzip
Size: 32008 bytes
Desc: not available
URL: <https://lists.freedesktop.org/archives/dri-devel/attachments/20171010/c190b069/attachment-0001.gz>


More information about the dri-devel mailing list