Revision d92da1fbb72490f999b7d0e809d13d0d52dc78ac authored by Linus Torvalds on 21 March 2019, 18:48:06 UTC, committed by Linus Torvalds on 21 March 2019, 18:48:06 UTC
Pull arm64 fixes from Catalin Marinas:
 "Mostly fixes apart from the kprobe blacklist checking which was
  deferred because of conflicting with a fix merged after I pinned the
  arm64 for-next/core branch (f2b3d8566d81 "arm64: kprobe: Always
  blacklist the KVM world-switch code").

  Summary:

   - Update the kprobe blacklist checking for arm64. This was supposed
     to be queued during the merge window but, due to conflicts, it was
     deferred until after -rc1

   - Extend the Fujitsu erratum 010001 workaround to A64FX v1r0

   - Whitelist HiSilicon Taishan v110 CPUs as not susceptible to
     Meltdown

   - Export save_stack_trace_regs()

   - Remove obsolete selection of MULTI_IRQ_HANDLER"

* tag 'arm64-fixes' of git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux:
  arm64: remove obsolete selection of MULTI_IRQ_HANDLER
  arm64: kpti: Whitelist HiSilicon Taishan v110 CPUs
  arm64: Add MIDR encoding for HiSilicon Taishan CPUs
  arm64/stacktrace: Export save_stack_trace_regs()
  arm64: apply workaround on A64FX v1r0
  arm64: kprobes: Use arch_populate_kprobe_blacklist()
  arm64: kprobes: Move exception_text check in blacklist
  arm64: kprobes: Remove unneeded RODATA check
  arm64: kprobes: Move extable address check into arch_prepare_kprobe()
2 parents fb549c5 + e5a5af7
memweight.c
// SPDX-License-Identifier: GPL-2.0
#include <linux/export.h>
#include <linux/bug.h>
#include <linux/bitmap.h>

/**
 * memweight - count the total number of bits set in memory area
 * @ptr: pointer to the start of the area
 * @bytes: the size of the area
 */
size_t memweight(const void *ptr, size_t bytes)
{
	size_t ret = 0;
	size_t longs;
	const unsigned char *bitmap = ptr;

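	/* Count bits byte by byte until the pointer is long-aligned */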
	for (; bytes > 0 && ((unsigned long)bitmap) % sizeof(long);
			bytes--, bitmap++)
		ret += hweight8(*bitmap);

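	/* Count the long-aligned middle of the area with bitmap_weight() */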
	longs = bytes / sizeof(long);
	if (longs) {
		BUG_ON(longs >= INT_MAX / BITS_PER_LONG);
		ret += bitmap_weight((unsigned long *)bitmap,
				longs * BITS_PER_LONG);
		bytes -= longs * sizeof(long);
		bitmap += longs * sizeof(long);
	}
	/*
	 * The reason that this last loop is distinct from the preceding
	 * bitmap_weight() call is to compute 1-bits in the last region smaller
	 * than sizeof(long) properly on big-endian systems.
	 */
	for (; bytes > 0; bytes--, bitmap++)
		ret += hweight8(*bitmap);

	return ret;
}
EXPORT_SYMBOL(memweight);
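
A minimal usage sketch, not part of this file: the foo_dev structure, its fields, and foo_blocks_in_use() are hypothetical, invented here purely for illustration. memweight() itself is the exported helper above, declared in include/linux/string.h.

/* Hypothetical caller: count how many blocks a driver has marked in use. */
#include <linux/string.h>	/* declares memweight() */

struct foo_dev {
	unsigned long *block_map;	/* one bit per block, set = in use */
	size_t map_bytes;		/* size of block_map in bytes */
};

static size_t foo_blocks_in_use(const struct foo_dev *dev)
{
	/* memweight() copes with any buffer length and alignment */
	return memweight(dev->block_map, dev->map_bytes);
}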