STATNODE_COUNTER(numfullpathfound, "Number of successful fullpath calls");
static long zap_and_exit_bucket_fail; STATNODE_ULONG(zap_and_exit_bucket_fail,
"Number of times zap_and_exit failed to lock");
+static long zap_and_exit_bucket_fail2; STATNODE_ULONG(zap_and_exit_bucket_fail2,
+    "Number of times zap_and_exit failed to lock (second callsite)");
static long cache_lock_vnodes_cel_3_failures;
STATNODE_ULONG(cache_lock_vnodes_cel_3_failures,
"Number of times 3-way vnode locking failed");
SYSCTL_INT(_vfs_cache, OID_AUTO, yield, CTLFLAG_RD, &cache_yield, 0,
"Number of times cache called yield");
-static void
+static void __noinline
cache_maybe_yield(void)
{
#define cache_assert_bucket_locked(x, y) do { } while (0)
#endif
-#define cache_sort(x, y) _cache_sort((void **)(x), (void **)(y))
+#define cache_sort_vnodes(x, y) _cache_sort_vnodes((void **)(x), (void **)(y))
static void
-_cache_sort(void **p1, void **p2)
+_cache_sort_vnodes(void **p1, void **p2)
{
void *tmp;
+ MPASS(*p1 != NULL || *p2 != NULL);
+
if (*p1 > *p2) {
tmp = *p2;
*p2 = *p1;
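/*
 * A minimal sketch of the address-ordering idea behind cache_sort_vnodes():
 * sorting the two lock pointers gives every thread the same acquisition
 * order, which rules out lock-order deadlocks, and the new MPASS guarantees
 * that the second pointer is non-NULL after sorting.  Userland pthread
 * illustration, not the kernel mtx(9) API; sort_locks and lock_pair are
 * illustrative names.
 */
#include <assert.h>
#include <pthread.h>

static void
sort_locks(pthread_mutex_t **p1, pthread_mutex_t **p2)
{
	pthread_mutex_t *tmp;

	assert(*p1 != NULL || *p2 != NULL);
	if (*p1 > *p2) {		/* order by address; NULL sorts to *p1 */
		tmp = *p2;
		*p2 = *p1;
		*p1 = tmp;
	}
}

static void
lock_pair(pthread_mutex_t *a, pthread_mutex_t *b)
{
	sort_locks(&a, &b);
	if (a != NULL && a != b)	/* skip a missing or duplicate lock */
		pthread_mutex_lock(a);
	pthread_mutex_lock(b);		/* b is non-NULL after sorting */
}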
cache_trylock_vnodes(struct mtx *vlp1, struct mtx *vlp2)
{
- cache_sort(&vlp1, &vlp2);
- MPASS(vlp2 != NULL);
+ cache_sort_vnodes(&vlp1, &vlp2);
if (vlp1 != NULL) {
if (!mtx_trylock(vlp1))
mtx_unlock(*vlpp);
*vlpp = NULL;
}
- cache_sort(&vlp1, &vlp2);
+ cache_sort_vnodes(&vlp1, &vlp2);
if (vlp1 == pvlp) {
mtx_lock(vlp2);
to_unlock = vlp2;
return (false);
}
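/*
 * Sketch of the trylock step under the same ordering convention: take both
 * locks without sleeping, and on any failure release what was taken and
 * report failure so the caller can back off and retry.  Userland pthread
 * illustration (cache_trylock_vnodes itself uses mtx_trylock); trylock_pair
 * is an illustrative name.
 */
#include <pthread.h>
#include <stdbool.h>

static bool
trylock_pair(pthread_mutex_t *a, pthread_mutex_t *b)
{
	/* a and b are already sorted by address; b is never NULL. */
	if (a != NULL && a != b) {
		if (pthread_mutex_trylock(a) != 0)
			return (false);
	}
	if (pthread_mutex_trylock(b) != 0) {
		if (a != NULL && a != b)
			pthread_mutex_unlock(a);
		return (false);
	}
	return (true);
}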
-static int
+static int __noinline
cache_zap_locked_vnode(struct namecache *ncp, struct vnode *vp)
{
struct mtx *pvlp, *vlp1, *vlp2, *to_unlock;
blp = NCP2BUCKETLOCK(ncp);
vlp1 = VP2VNODELOCK(ncp->nc_dvp);
vlp2 = VP2VNODELOCK(ncp->nc_vp);
- cache_sort(&vlp1, &vlp2);
+ cache_sort_vnodes(&vlp1, &vlp2);
if (vlp1 == pvlp) {
mtx_lock(vlp2);
to_unlock = vlp2;
vlp = NULL;
if (!(ncp->nc_flag & NCF_NEGATIVE))
vlp = VP2VNODELOCK(ncp->nc_vp);
- cache_sort(&dvlp, &vlp);
+ cache_sort_vnodes(&dvlp, &vlp);
if (*vlpp1 == dvlp && *vlpp2 == vlp) {
cache_zap_locked(ncp, false);
goto out_no_entry;
}
- counter_u64_add(numposzaps, 1);
-
error = cache_zap_wlocked_bucket(ncp, blp);
- if (error != 0) {
+ if (__predict_false(error != 0)) {
zap_and_exit_bucket_fail++;
cache_maybe_yield();
goto retry;
}
+ counter_u64_add(numposzaps, 1);
cache_free(ncp);
return (0);
out_no_entry:
error = cache_zap_rlocked_bucket(ncp, blp);
else
error = cache_zap_locked_vnode(ncp, dvp);
- if (error != 0) {
- zap_and_exit_bucket_fail++;
+ if (__predict_false(error != 0)) {
+ zap_and_exit_bucket_fail2++;
cache_maybe_yield();
goto retry;
}
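/*
 * Sketch of the retry pattern used above: when the zap cannot take the locks
 * it needs, the failure is counted (now in separate ...fail and ...fail2
 * counters so the two call sites can be told apart), the thread yields so
 * the lock holder can make progress, and the lookup is retried from the top.
 * Userland illustration; predict_false() stands in for the kernel's
 * __predict_false(), and try_zap_entry/zap_entry are illustrative names.
 */
#include <pthread.h>
#include <sched.h>
#include <stdatomic.h>
#include <stdbool.h>

#define predict_false(x)	__builtin_expect((x) != 0, 0)

static pthread_mutex_t bucket_lock = PTHREAD_MUTEX_INITIALIZER;
static atomic_ulong zap_fail;		/* analogue of zap_and_exit_bucket_fail */

/* Remove an entry only if the bucket lock can be taken without sleeping. */
static bool
try_zap_entry(void)
{
	if (pthread_mutex_trylock(&bucket_lock) != 0)
		return (false);
	/* ... unlink the entry from its bucket here ... */
	pthread_mutex_unlock(&bucket_lock);
	return (true);
}

static void
zap_entry(void)
{
	while (predict_false(!try_zap_entry())) {
		atomic_fetch_add(&zap_fail, 1);
		sched_yield();		/* back off, then retry from the top */
	}
}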
vlp1 = VP2VNODELOCK(vp);
vlp2 = VP2VNODELOCK(dvp);
- cache_sort(&vlp1, &vlp2);
+ cache_sort_vnodes(&vlp1, &vlp2);
if (vlp1 != NULL) {
mtx_lock(vlp1);
MPASS(cel->blp[0] == NULL);
MPASS(cel->blp[1] == NULL);
- cache_sort(&blp1, &blp2);
+ cache_sort_vnodes(&blp1, &blp2);
if (blp1 != NULL) {
rw_wlock(blp1);