author    Andy Wingo <wingo@pobox.com>  2012-02-19 20:34:26 +0100
committer Andy Wingo <wingo@pobox.com>  2012-02-19 20:46:14 +0100
commit    7932759fc751393155ed80a765d89fefb758c893 (patch)
tree      1df3ba832def49de3c8ce54c24db0711001b988b
parent    917b0e72f72944ecc6af48f43604593974b752de (diff)
download  guile-7932759fc751393155ed80a765d89fefb758c893.tar.gz
better hysteresis in weak-set, weak-table
* libguile/weak-set.c:
* libguile/weak-table.c: Attempt to avoid bouncing between sizes when
  growing the vector causes elements to be removed from the set/table.
 libguile/weak-set.c   | 38 +++++++++++++++++++++++++++++++++++++-
 libguile/weak-table.c | 38 +++++++++++++++++++++++++++++++++++++-
 2 files changed, 74 insertions(+), 2 deletions(-)
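To see why the acceptance check matters: the resize loop recomputes the
target size index after allocating the new vector, because the allocation
itself can run finalizers that remove weak entries.  Without hysteresis, a
grow that finalizes a few entries immediately looks like a shrink, and the
loop bounces between two sizes.  What follows is a minimal, self-contained
sketch of the idea, not Guile's code: sizes[], n_items, and the 9/10 growth
trigger are invented stand-ins for hashset_size[]/hashtable_size[] and the
real struct fields (the real check compares set->size against the bound).

#include <stdio.h>

/* Invented stand-in for Guile's hashset_size[]/hashtable_size[].  */
static const unsigned long sizes[] = { 31, 61, 113, 223, 443 };
#define N_SIZES ((int) (sizeof sizes / sizeof sizes[0]))

/* Smallest size index that keeps occupancy under an assumed 9/10
   growth trigger.  */
static int
compute_size_index (unsigned long n_items)
{
  int i = 0;
  while (i + 1 < N_SIZES && n_items > sizes[i] * 9 / 10)
    i++;
  return i;
}

/* Hysteresis: accept a vector one step larger than the freshly
   recomputed target, as long as it would stay at least 1/5 full.  */
static int
is_acceptable_size_index (unsigned long n_items, int size_index)
{
  int computed = compute_size_index (n_items);

  if (size_index == computed)
    return 1;                                 /* target unchanged */
  if (size_index == computed + 1)
    return n_items > sizes[size_index] / 5;   /* keep the larger one */
  return 0;                                   /* too far off; retry */
}

int
main (void)
{
  /* With 60 items, a 61-bucket vector is over the trigger, so the
     loop picks sizes[2] = 113.  Suppose allocating that vector ran
     finalizers that killed 10 weak entries.  */
  int size_index = compute_size_index (60);   /* == 2 */
  unsigned long n_items = 50;

  /* The recomputed target is back down at sizes[1] = 61, but 50
     items still exceed 113 / 5 = 22, so we keep the big vector
     rather than bouncing back down.  */
  printf ("keep the larger vector: %s\n",
          is_acceptable_size_index (n_items, size_index) ? "yes" : "no");
  return 0;
}

Without the acceptance check, this exact sequence would discard the
113-bucket vector, reallocate at 61, cross the growth trigger again on
the next insertion, and repeat.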
diff --git a/libguile/weak-set.c b/libguile/weak-set.c
index a1ae4ea6b..476e88627 100644
--- a/libguile/weak-set.c
+++ b/libguile/weak-set.c
@@ -308,6 +308,42 @@ compute_size_index (scm_t_weak_set *set)
return i;
}

+static int
+is_acceptable_size_index (scm_t_weak_set *set, int size_index)
+{
+ int computed = compute_size_index (set);
+
+ if (size_index == computed)
+ /* We were going to grow or shrink, and allocating the new vector
+ didn't change the target size. */
+ return 1;
+
+ if (size_index == computed + 1)
+ {
+ /* We were going to enlarge the set, but allocating the new
+ vector finalized some objects, making an enlargement
+ unnecessary. It might still be a good idea to use the larger
+ set, though. (This branch also gets hit if, while allocating
+ the vector, some other thread was actively removing items from
+ the set. That is less likely, though.) */
+ unsigned long new_lower = hashset_size[size_index] / 5;
+
+ return set->size > new_lower;
+ }
+
+ if (size_index == computed - 1)
+ {
+ /* We were going to shrink the set, but when we dropped the lock
+ to allocate the new vector, some other thread added elements to
+ the set. */
+ return 0;
+ }
+
+ /* The computed size differs from our newly allocated size by more
+ than one size index -- recalculate. */
+ return 0;
+}
+
static void
resize_set (scm_t_weak_set *set)
{
@@ -328,7 +364,7 @@ resize_set (scm_t_weak_set *set)
"weak set");
scm_i_pthread_mutex_unlock (&set->lock);
}
- while (new_size_index != compute_size_index (set));
+ while (!is_acceptable_size_index (set, new_size_index));
old_entries = set->entries;
old_size = set->size;
diff --git a/libguile/weak-table.c b/libguile/weak-table.c
index 3d453bafd..d0d8efbb4 100644
--- a/libguile/weak-table.c
+++ b/libguile/weak-table.c
@@ -431,6 +431,42 @@ compute_size_index (scm_t_weak_table *table)
return i;
}

+static int
+is_acceptable_size_index (scm_t_weak_table *table, int size_index)
+{
+ int computed = compute_size_index (table);
+
+ if (size_index == computed)
+ /* We were going to grow or shrink, and allocating the new vector
+ didn't change the target size. */
+ return 1;
+
+ if (size_index == computed + 1)
+ {
+ /* We were going to enlarge the table, but allocating the new
+ vector finalized some objects, making an enlargement
+ unnecessary. It might still be a good idea to use the larger
+ table, though. (This branch also gets hit if, while allocating
+ the vector, some other thread was actively removing items from
+ the table. That is less likely, though.) */
+ unsigned long new_lower = hashtable_size[size_index] / 5;
+
+ return table->size > new_lower;
+ }
+
+ if (size_index == computed - 1)
+ {
+ /* We were going to shrink the table, but when we dropped the lock
+ to allocate the new vector, some other thread added elements to
+ the table. */
+ return 0;
+ }
+
+ /* The computed size differs from our newly allocated size by more
+ than one size index -- recalculate. */
+ return 0;
+}
+
static void
resize_table (scm_t_weak_table *table)
{
@@ -450,7 +486,7 @@ resize_table (scm_t_weak_table *table)
new_entries = allocate_entries (new_size, table->kind);
scm_i_pthread_mutex_unlock (&table->lock);
}
- while (new_size_index != compute_size_index (table));
+ while (!is_acceptable_size_index (table, new_size_index));
old_entries = table->entries;
old_size = table->size;
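A note on the constants, as far as these hunks show them: the lower
acceptance bound is a fifth of the new vector's capacity, which sits well
below the occupancy level that triggers growth in compute_size_index (the
trigger itself is outside these hunks).  That gap is the hysteresis band:
a vector accepted one size up cannot immediately become a shrink
candidate, so a resize that loses a few entries to finalizers settles in
one pass instead of oscillating.  The computed - 1 and farther-off cases
still retry, since they mean another thread changed the table materially
while the lock was dropped for allocation.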