void prune_dcache(int count)
{
	spin_lock(&dcache_lock);
	for (;;) {
		struct dentry *dentry;
		struct list_head *tmp;

		tmp = dentry_unused.prev;
		if (tmp == &dentry_unused)
			break;
		list_del_init(tmp);
		dentry = list_entry(tmp, struct dentry, d_lru);

		/* If the dentry was recently referenced, don't free it. */
		if (dentry->d_vfs_flags & DCACHE_REFERENCED) {
			dentry->d_vfs_flags &= ~DCACHE_REFERENCED;
			list_add(&dentry->d_lru, &dentry_unused);
			continue;
		}
		dentry_stat.nr_unused--;

		/* Unused dentry with a count? */
		if (atomic_read(&dentry->d_count))
			BUG();
		prune_one_dentry(dentry);
		if (!--count)
			break;
	}
	spin_unlock(&dcache_lock);
}
Z "lock breaking":
void prune_dcache(int count)
{
	DEFINE_RESCHED_COUNT;

redo:
	spin_lock(&dcache_lock);
	for (;;) {
		struct dentry *dentry;
		struct list_head *tmp;

		if (TEST_RESCHED_COUNT(100)) {
			RESET_RESCHED_COUNT();
			if (conditional_schedule_needed()) {
				spin_unlock(&dcache_lock);
				unconditional_schedule();
				goto redo;
			}
		}

		tmp = dentry_unused.prev;
		if (tmp == &dentry_unused)
			break;
		list_del_init(tmp);
		dentry = list_entry(tmp, struct dentry, d_lru);

		/* If the dentry was recently referenced, don't free it. */
		if (dentry->d_vfs_flags & DCACHE_REFERENCED) {
			dentry->d_vfs_flags &= ~DCACHE_REFERENCED;
			list_add(&dentry->d_lru, &dentry_unused);
			continue;
		}
		dentry_stat.nr_unused--;

		/* Unused dentry with a count? */
		if (atomic_read(&dentry->d_count))
			BUG();
		prune_one_dentry(dentry);
		if (!--count)
			break;
	}
	spin_unlock(&dcache_lock);
}
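For reference, here is one plausible way the helper macros used above could be defined. This is a minimal sketch only, assuming a per-invocation counter and the 2.4-era current->need_resched flag; the names match the snippet, but the bodies are illustrative rather than the actual low-latency patch.

/* Illustrative definitions only; the real patch may differ in detail. */
#define DEFINE_RESCHED_COUNT		int resched_count = 0
#define TEST_RESCHED_COUNT(n)		(++resched_count > (n))
#define RESET_RESCHED_COUNT()		(resched_count = 0)

/* Has another task asked to run on this CPU? (2.4-era flag, assumed here.) */
#define conditional_schedule_needed()	(current->need_resched)

/* Yield the CPU; the caller must have dropped any spinlocks first. */
#define unconditional_schedule()	schedule()

Note the goto redo after rescheduling: once dcache_lock has been dropped, other CPUs may have modified the unused-dentry list, so the scan must restart from the top rather than continue from a now-stale list position.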