import mars-30.tgz

Thomas Schoebel-Theuer 2010-08-08 10:03:42 +01:00
parent 23b4ae2f01
commit 332df6f7ca
11 changed files with 194 additions and 42 deletions

51
brick.c

@@ -5,6 +5,7 @@
#include <linux/string.h>
//#define BRICK_DEBUGGING
#define USE_FREELIST
#define _STRATEGY
#define BRICK_OBJ_NR /*empty => leads to an open array */
@@ -360,10 +361,14 @@ int default_init_object_layout(struct generic_output *output, struct generic_obj
object_layout->aspect_layouts = data;
object_layout->object_type = object_type;
object_layout->init_data = output;
object_layout->aspect_count = 0;
object_layout->aspect_max = aspect_max;
object_layout->object_size = object_type->default_size;
atomic_set(&object_layout->alloc_count, 0);
atomic_set(&object_layout->free_count, 0);
spin_lock_init(&object_layout->free_lock);
object_layout->free_list = NULL;
object_layout->module_name = module_name;
status = output->ops->make_object_layout(output, object_layout);
@@ -446,23 +451,40 @@ EXPORT_SYMBOL_GPL(default_make_object_layout);
struct generic_object *alloc_generic(struct generic_object_layout *object_layout)
{
void *data;
struct generic_object *object;
struct generic_object *object = object_layout->free_list;
if (object) {
unsigned long flags;
traced_lock(&object_layout->free_lock, flags);
object = object_layout->free_list;
if (object) {
object_layout->free_list = *(struct generic_object**)object;
*(struct generic_object**)object = NULL;
traced_unlock(&object_layout->free_lock, flags);
atomic_dec(&object_layout->free_count);
data = object;
goto ok;
}
traced_unlock(&object_layout->free_lock, flags);
}
data = kzalloc(object_layout->object_size, GFP_MARS);
if (unlikely(!data))
goto err;
atomic_inc(&object_layout->alloc_count);
ok:
object = generic_construct(data, object_layout);
if (unlikely(!object))
goto err_free;
atomic_inc(&object_layout->alloc_count);
#if 1
{
int count = atomic_read(&object_layout->alloc_count);
if (count >= object_layout->last_count + 1000) {
object_layout->last_count = count;
BRICK_INF("pool %s/%p/%s reaching %d\n", object_layout->object_type->object_type_name, object_layout, object_layout->module_name, count);
BRICK_INF("pool %s/%p/%s alloc=%d free=%d\n", object_layout->object_type->object_type_name, object_layout, object_layout->module_name, count, atomic_read(&object_layout->free_count));
}
}
#endif
@@ -478,12 +500,31 @@ EXPORT_SYMBOL_GPL(alloc_generic);
void free_generic(struct generic_object *object)
{
struct generic_object_layout *object_layout;
if (unlikely(!object)) {
BRICK_ERR("free_generic on NULL object\n");
return;
}
if (likely(object->object_layout))
atomic_dec(&object->object_layout->alloc_count);
object_layout = object->object_layout;
if (likely(object_layout)) {
unsigned long flags;
generic_destruct(object);
#ifdef USE_FREELIST
memset(object, 0, object_layout->object_size);
atomic_inc(&object_layout->free_count);
traced_lock(&object_layout->free_lock, flags);
*(struct generic_object**)object = object_layout->free_list;
object_layout->free_list = object;
traced_unlock(&object_layout->free_lock, flags);
return;
#endif
atomic_dec(&object_layout->alloc_count);
}
kfree(object);
}
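
The new USE_FREELIST path above recycles objects through an intrusive, spinlock-protected free list: free_generic() zeroes the object and stores the old list head in its first pointer-sized bytes, while alloc_generic() pops from that list before falling back to kzalloc(). A minimal userspace sketch of the same pattern, with a pthread mutex standing in for free_lock and hypothetical names (layout, obj_alloc, obj_free), not the kernel code itself:

#include <pthread.h>
#include <stdlib.h>
#include <string.h>

struct layout {
    size_t object_size;            /* must be >= sizeof(void *) */
    void *free_list;               /* head of the recycled-object chain */
    pthread_mutex_t free_lock;
};

static void layout_init(struct layout *l, size_t object_size)
{
    l->object_size = object_size;
    l->free_list = NULL;
    pthread_mutex_init(&l->free_lock, NULL);
}

static void *obj_alloc(struct layout *l)
{
    void *obj;

    pthread_mutex_lock(&l->free_lock);
    obj = l->free_list;
    if (obj) {
        /* pop: the first pointer-sized bytes of a recycled object
         * hold the "next" link; clear it before handing the object out */
        l->free_list = *(void **)obj;
        *(void **)obj = NULL;
    }
    pthread_mutex_unlock(&l->free_lock);

    if (!obj)
        obj = calloc(1, l->object_size);
    return obj;
}

static void obj_free(struct layout *l, void *obj)
{
    /* zero the object, then push it onto the free list; the link
     * is stored inside the object itself (intrusive list) */
    memset(obj, 0, l->object_size);
    pthread_mutex_lock(&l->free_lock);
    *(void **)obj = l->free_list;
    l->free_list = obj;
    pthread_mutex_unlock(&l->free_lock);
}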

46
brick.h

@@ -38,8 +38,9 @@ struct generic_aspect;
#define GENERIC_ASPECT_TYPE(BRICK) \
char *aspect_type_name; \
const struct generic_object_type *object_type; \
int aspect_size; \
int (*init_fn)(struct generic_aspect *ini, void *data); \
int aspect_size; \
int (*init_fn)(struct generic_aspect *ini, void *data); \
void (*exit_fn)(struct generic_aspect *ini, void *data); \
struct generic_aspect_type {
GENERIC_ASPECT_TYPE(generic);
@@ -58,6 +59,8 @@ struct generic_aspect_layout {
char *object_type_name; \
int default_size; \
int brick_obj_nr; \
int (*init_fn)(struct generic_aspect *ini, void *data); \
void (*exit_fn)(struct generic_aspect *ini, void *data); \
struct generic_object_type {
GENERIC_OBJECT_TYPE(generic);
@@ -66,11 +69,15 @@ struct generic_object_type {
#define GENERIC_OBJECT_LAYOUT(BRICK) \
struct generic_aspect_layout **aspect_layouts; \
const struct generic_object_type *object_type; \
void *init_data; \
int aspect_count; \
int aspect_max; \
int object_size; \
int last_count; \
atomic_t alloc_count; \
atomic_t free_count; \
spinlock_t free_lock; \
struct generic_object *free_list; \
char *module_name; \
struct generic_object_layout {
@@ -449,6 +456,12 @@ extern inline struct BRICK##_object *BRICK##_construct(void *data, struct BRICK#
int i; \
\
obj->object_layout = object_layout; \
if (object_layout->object_type->init_fn) { \
int status = object_layout->object_type->init_fn((void*)obj, object_layout->init_data); \
if (status < 0) { \
return NULL; \
} \
} \
for (i = 0; i < object_layout->aspect_count; i++) { \
struct generic_aspect_layout *aspect_layout; \
struct generic_aspect *aspect; \
@@ -459,13 +472,34 @@ extern inline struct BRICK##_object *BRICK##_construct(void *data, struct BRICK#
aspect->object = (void*)obj; \
if (aspect_layout->aspect_type->init_fn) { \
int status = aspect_layout->aspect_type->init_fn((void*)aspect, aspect_layout->init_data); \
if (status) { \
if (status < 0) { \
return NULL; \
} \
} \
} \
return obj; \
} \
\
extern inline void BRICK##_destruct(struct BRICK##_object *obj) \
{ \
struct BRICK##_object_layout *object_layout = obj->object_layout; \
int i; \
\
if (object_layout->object_type->exit_fn) { \
object_layout->object_type->exit_fn((void*)obj, object_layout->init_data); \
} \
for (i = 0; i < object_layout->aspect_count; i++) { \
struct generic_aspect_layout *aspect_layout; \
struct generic_aspect *aspect; \
aspect_layout = object_layout->aspect_layouts[i]; \
if (!aspect_layout->aspect_type) \
continue; \
aspect = ((void*)obj) + aspect_layout->aspect_offset; \
if (aspect_layout->aspect_type->exit_fn) { \
aspect_layout->aspect_type->exit_fn((void*)aspect, aspect_layout->init_data); \
} \
} \
} \
#define GENERIC_ASPECT_FUNCTIONS(BRICK,PREFIX) \
\
@@ -532,12 +566,14 @@ GENERIC_OBJECT_FUNCTIONS(generic);
} \
atomic_inc(&current->lock_count); \
(void)flags; \
spin_lock(spinlock); \
/*spin_lock(spinlock);*/ \
spin_lock_irqsave(spinlock, flags); \
} while (0)
# define traced_unlock(spinlock,flags) \
do { \
spin_unlock(spinlock); \
/*spin_unlock(spinlock);*/ \
spin_unlock_irqrestore(spinlock, flags); \
atomic_dec(&current->lock_count); \
} while (0)
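
traced_lock()/traced_unlock() now use the _irqsave/_irqrestore spinlock variants instead of plain spin_lock()/spin_unlock(), so the flags argument finally matters: local interrupts are disabled and their previous state is restored after the critical section. A minimal sketch of the underlying kernel idiom (standard <linux/spinlock.h> API; the lock and function names here are placeholders, not MARS code):

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(example_lock);

static void example_critical_section(void)
{
    unsigned long flags;

    /* Disable local IRQs and remember their previous state in flags,
     * which also makes the lock safe against takers in IRQ context. */
    spin_lock_irqsave(&example_lock, flags);
    /* ... protected work ... */
    spin_unlock_irqrestore(&example_lock, flags);
}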

19
mars.h

@@ -176,23 +176,24 @@ static const struct generic_aspect_type BRICK##_mars_ref_aspect_type = { \
.object_type = &mars_ref_type, \
.aspect_size = sizeof(struct BRICK##_mars_ref_aspect), \
.init_fn = BRICK##_mars_ref_aspect_init_fn, \
.exit_fn = BRICK##_mars_ref_aspect_exit_fn, \
}; \
\
static const struct generic_aspect_type *BRICK##_aspect_types[BRICK_OBJ_NR] = { \
[BRICK_OBJ_MARS_REF] = &BRICK##_mars_ref_aspect_type, \
}; \
#define _CHECK_SPIN(spinlock,OP,minval) \
#define _CHECK_ATOMIC(atom,OP,minval) \
do { \
int test = atomic_read(spinlock); \
int test = atomic_read(atom); \
if (test OP (minval)) { \
atomic_set(spinlock, minval); \
MARS_ERR("line %d spinlock " #spinlock " " #OP " " #minval "\n", __LINE__); \
atomic_set(atom, minval); \
MARS_ERR("line %d atom " #atom " " #OP " " #minval "\n", __LINE__); \
} \
} while (0)
#define CHECK_SPIN(spinlock,minval) \
_CHECK_SPIN(spinlock, <, minval)
#define CHECK_ATOMIC(atom,minval) \
_CHECK_ATOMIC(atom, <, minval)
static inline void mars_ref_attach_bio(struct mars_ref_object *mref, struct bio *bio)
{
@@ -209,4 +210,10 @@ static inline void mars_ref_attach_bio(struct mars_ref_object *mref, struct bio
atomic_set(&mref->ref_count, 1);
}
#define CHECK_HEAD_EMPTY(head) \
if (!list_empty(head)) { \
INIT_LIST_HEAD(head); \
MARS_ERR("list_head " #head " (%p) not empty\n", head); \
} \
#endif
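
For reference, a call such as CHECK_ATOMIC(&bf->bf_count, 1) in the buf brick below expands to roughly the following clamp-and-complain sequence. This is just the renamed macro from the hunk above written out, not new code:

do {
    int test = atomic_read(&bf->bf_count);
    if (test < (1)) {
        atomic_set(&bf->bf_count, 1);
        MARS_ERR("line %d atom &bf->bf_count < 1\n", __LINE__);
    }
} while (0);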

View File

@@ -312,7 +312,7 @@ static int buf_ref_get(struct buf_output *output, struct mars_ref_object *mref)
#endif
/* Grab reference.
*/
_CHECK_SPIN(&mref->ref_count, !=, 0);
_CHECK_ATOMIC(&mref->ref_count, !=, 0);
atomic_inc(&mref->ref_count);
mref_a = buf_mars_ref_get_aspect(output, mref);
@@ -335,7 +335,7 @@ again:
bf = hash_find(brick, ((unsigned int)base_pos) >> brick->backing_order);
if (bf) {
list_del_init(&bf->bf_lru_head);
CHECK_SPIN(&bf->bf_count, 0);
CHECK_ATOMIC(&bf->bf_count, 0);
atomic_inc(&bf->bf_count);
traced_unlock(&brick->brick_lock, flags);
@@ -398,7 +398,7 @@ again:
mref->ref_data = bf->bf_data + base_offset;
CHECK_SPIN(&mref->ref_count, 1);
CHECK_ATOMIC(&mref->ref_count, 1);
return mref->ref_len;
@@ -414,7 +414,7 @@ static void __bf_put(struct buf_head *bf)
unsigned long flags;
MARS_DBG("bf=%p bf_count=%d\n", bf, bf_count);
CHECK_SPIN(&bf->bf_count, 1);
CHECK_ATOMIC(&bf->bf_count, 1);
#ifdef FAST
if (!atomic_dec_and_test(&bf->bf_count))
@@ -473,7 +473,7 @@ static void _buf_ref_put(struct buf_mars_ref_aspect *mref_a)
struct mars_ref_object *mref = mref_a->object;
struct buf_head *bf;
CHECK_SPIN(&mref->ref_count, 1);
CHECK_ATOMIC(&mref->ref_count, 1);
if (!atomic_dec_and_test(&mref->ref_count))
return;
@@ -615,7 +615,7 @@ static int _buf_make_bios(struct buf_brick *brick, struct buf_head *bf, void *st
/* Remember the number of bios we are submitting.
*/
CHECK_SPIN(&bf->bf_bio_count, 0);
CHECK_ATOMIC(&bf->bf_bio_count, 0);
atomic_inc(&bf->bf_bio_count);
MARS_DBG("starting buf IO mref=%p bio=%p bf=%p bf_count=%d bf_bio_count=%d\n", mref, mref->orig_bio, bf, atomic_read(&bf->bf_count), atomic_read(&bf->bf_bio_count));
@@ -670,7 +670,7 @@ static void _buf_bio_callback(struct bio *bio, int code)
bf->bf_bio_status = code;
}
CHECK_SPIN(&bf->bf_bio_count, 1);
CHECK_ATOMIC(&bf->bf_bio_count, 1);
if (!atomic_dec_and_test(&bf->bf_bio_count))
return;
@@ -679,7 +679,7 @@ static void _buf_bio_callback(struct bio *bio, int code)
brick = bf->bf_brick;
// get an extra reference, to avoid freeing bf underneath during callbacks
CHECK_SPIN(&bf->bf_count, 1);
CHECK_ATOMIC(&bf->bf_count, 1);
atomic_inc(&bf->bf_count);
traced_lock(&bf->bf_lock, flags);
@@ -759,7 +759,7 @@ static void _buf_bio_callback(struct bio *bio, int code)
MARS_ERR("endless loop 2\n");
}
#endif
CHECK_SPIN(&mref->ref_count, 1);
CHECK_ATOMIC(&mref->ref_count, 1);
/* It should be safe to do this without locking, because
* tmp is on the stack, so there is no concurrency.
*/
@@ -810,7 +810,7 @@ static void buf_ref_io(struct buf_output *output, struct mars_ref_object *mref,
* This will be released later in _buf_bio_callback() after
* calling the callbacks.
*/
CHECK_SPIN(&mref->ref_count, 1);
CHECK_ATOMIC(&mref->ref_count, 1);
atomic_inc(&mref->ref_count);
bf = mref_a->rfa_bf;
@@ -819,7 +819,7 @@ static void buf_ref_io(struct buf_output *output, struct mars_ref_object *mref,
goto callback;
}
CHECK_SPIN(&bf->bf_count, 1);
CHECK_ATOMIC(&bf->bf_count, 1);
if (rw != READ) {
if (unlikely(mref->ref_may_write == READ)) {
@@ -950,6 +950,16 @@ static int buf_mars_ref_aspect_init_fn(struct generic_aspect *_ini, void *_init_
return 0;
}
static void buf_mars_ref_aspect_exit_fn(struct generic_aspect *_ini, void *_init_data)
{
struct buf_mars_ref_aspect *ini = (void*)_ini;
(void)ini;
#if 1
CHECK_HEAD_EMPTY(&ini->rfa_pending_head);
CHECK_HEAD_EMPTY(&ini->tmp_head);
#endif
}
MARS_MAKE_STATICS(buf);
////////////////////// brick constructors / destructors ////////////////////

View File

@@ -33,18 +33,21 @@ static void check_buf_endio(struct mars_ref_object *mref)
return;
}
traced_lock(&output->check_lock, flags);
if (mref_a->call_count-- < 0) {
mref_a->call_count = 0;
MARS_ERR("instance %d/%s: too many callbacks on %p\n", output->instance_nr, input->connect->type->type_name, mref);
}
#ifdef CHECK_LOCK
traced_lock(&output->check_lock, flags);
if (list_empty(&mref_a->mref_head)) {
MARS_ERR("instance %d/%s: list entry missing on %p\n", output->instance_nr, input->connect->type->type_name, mref);
}
list_del_init(&mref_a->mref_head);
traced_unlock(&output->check_lock, flags);
#endif
mref->cb_private = mref_a->old_private;
mref_a->last_jiffies = jiffies;
@@ -83,6 +86,7 @@ static int check_watchdog(void *data)
msleep_interruptible(5000);
#ifdef CHECK_LOCK
traced_lock(&output->check_lock, flags);
now = jiffies;
@@ -119,7 +123,9 @@ static int check_watchdog(void *data)
MARS_ERR("================================\n");
}
}
traced_unlock(&output->check_lock, flags);
#endif
}
return 0;
}
@@ -156,16 +162,21 @@ static void check_ref_io(struct check_output *output, struct mars_ref_object *mr
return;
}
traced_lock(&output->check_lock, flags);
if (mref_a->call_count++ > 1) {
mref_a->call_count = 1;
MARS_ERR("instance %d/%s: multiple parallel calls on %p\n", output->instance_nr, input->connect->type->type_name, mref);
}
#ifdef CHECK_LOCK
traced_lock(&output->check_lock, flags);
if (!list_empty(&mref_a->mref_head)) {
list_del(&mref_a->mref_head);
MARS_ERR("instance %d/%s: list head not empty on %p\n", output->instance_nr, input->connect->type->type_name, mref);
}
list_add_tail(&mref_a->mref_head, &output->mref_anchor);
traced_unlock(&output->check_lock, flags);
#endif
if (mref->cb_ref_endio != check_buf_endio) {
mref_a->old_buf_endio = mref->cb_ref_endio;
mref->cb_ref_endio = check_buf_endio;
@@ -173,7 +184,6 @@ static void check_ref_io(struct check_output *output, struct mars_ref_object *mr
mref->cb_private = output;
}
mref_a->last_jiffies = jiffies;
traced_unlock(&output->check_lock, flags);
GENERIC_INPUT_CALL(input, mars_ref_io, mref, rw);
}
@@ -183,11 +193,25 @@ static void check_ref_io(struct check_output *output, struct mars_ref_object *mr
static int check_mars_ref_aspect_init_fn(struct generic_aspect *_ini, void *_init_data)
{
struct check_mars_ref_aspect *ini = (void*)_ini;
#ifdef CHECK_LOCK
INIT_LIST_HEAD(&ini->mref_head);
#endif
ini->old_buf_endio = NULL;
ini->old_private = NULL;
ini->last_jiffies = jiffies;
ini->call_count = 0;
return 0;
}
static void check_mars_ref_aspect_exit_fn(struct generic_aspect *_ini, void *_init_data)
{
struct check_mars_ref_aspect *ini = (void*)_ini;
(void)ini;
#ifdef CHECK_LOCK
CHECK_HEAD_EMPTY(&ini->mref_head);
#endif
}
MARS_MAKE_STATICS(check);
////////////////////// brick constructors / destructors ////////////////////
@@ -202,10 +226,12 @@ static int check_output_construct(struct check_output *output)
static int count = 0;
struct task_struct *watchdog;
spin_lock_init(&output->check_lock);
output->instance_nr = ++count;
#ifdef CHECK_LOCK
spin_lock_init(&output->check_lock);
INIT_LIST_HEAD(&output->mio_anchor);
INIT_LIST_HEAD(&output->mref_anchor);
#endif
watchdog = kthread_create(check_watchdog, output, "check_watchdog%d", output->instance_nr);
if (!IS_ERR(watchdog)) {
output->watchdog = watchdog;

View File

@@ -2,9 +2,13 @@
#ifndef MARS_CHECK_H
#define MARS_CHECK_H
//#define CHECK_LOCK
struct check_mars_ref_aspect {
GENERIC_ASPECT(mars_ref);
#ifdef CHECK_LOCK
struct list_head mref_head;
#endif
void (*old_buf_endio)(struct mars_ref_object *mref);
void *old_private;
unsigned long last_jiffies;
@@ -21,11 +25,13 @@ struct check_input {
struct check_output {
MARS_OUTPUT(check);
spinlock_t check_lock;
int instance_nr;
struct task_struct *watchdog;
#ifdef CHECK_LOCK
spinlock_t check_lock;
struct list_head mio_anchor;
struct list_head mref_anchor;
#endif
};
MARS_TYPES(check);
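
The check brick changes follow one pattern: the list-based tracking state (mref_head, mio_anchor, mref_anchor) and the check_lock protecting it are now guarded by CHECK_LOCK, which stays commented out by default, so the debug machinery compiles away entirely while instance_nr and the call_count sanity checks remain. A condensed sketch of that pattern, with a hypothetical struct name (tracked_output) and the usual kernel headers assumed:

#include <linux/spinlock.h>
#include <linux/list.h>

//#define CHECK_LOCK                 /* uncomment to build the tracking back in */

struct tracked_output {
    int instance_nr;                 /* always present */
#ifdef CHECK_LOCK
    spinlock_t check_lock;           /* debug-only state */
    struct list_head mref_anchor;
#endif
};

static void tracked_output_init(struct tracked_output *out)
{
#ifdef CHECK_LOCK
    spin_lock_init(&out->check_lock);
    INIT_LIST_HEAD(&out->mref_anchor);
#endif
}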

View File

@@ -386,6 +386,15 @@ static int device_sio_mars_ref_aspect_init_fn(struct generic_aspect *_ini, void
return 0;
}
static void device_sio_mars_ref_aspect_exit_fn(struct generic_aspect *_ini, void *_init_data)
{
struct device_sio_mars_ref_aspect *ini = (void*)_ini;
(void)ini;
#if 1
CHECK_HEAD_EMPTY(&ini->io_head);
#endif
}
MARS_MAKE_STATICS(device_sio);
////////////////////// brick constructors / destructors ////////////////////

View File

@@ -48,10 +48,17 @@ static void dummy_ref_io(struct dummy_output *output, struct mars_ref_object *mr
static int dummy_mars_ref_aspect_init_fn(struct generic_aspect *_ini, void *_init_data)
{
struct dummy_mars_ref_aspect *ini = (void*)_ini;
(void)ini;
ini->my_own = 0;
return 0;
}
static void dummy_mars_ref_aspect_exit_fn(struct generic_aspect *_ini, void *_init_data)
{
struct dummy_mars_ref_aspect *ini = (void*)_ini;
(void)ini;
}
MARS_MAKE_STATICS(dummy);
////////////////////// brick constructors / destructors ////////////////////

View File

@@ -136,6 +136,10 @@ static int if_device_mars_ref_aspect_init_fn(struct generic_aspect *_ini, void *
return 0;
}
static void if_device_mars_ref_aspect_exit_fn(struct generic_aspect *_ini, void *_init_data)
{
}
MARS_MAKE_STATICS(if_device);
//////////////////////// contructors / destructors ////////////////////////

View File

@@ -80,7 +80,7 @@ void make_test_instance(void)
void connect(struct generic_input *a, struct generic_output *b)
{
int status;
#if 1
#if 0
struct generic_brick *tmp = brick(&check_brick_type);
status = generic_connect(a, tmp->outputs[0]);

View File

@@ -52,7 +52,7 @@ static void _usebuf_origmref_endio(struct usebuf_output *output, struct mars_ref
MARS_DBG("origmref=%p subref_count=%d error=%d\n", origmref, atomic_read(&origmref_a->subref_count), origmref->cb_error);
CHECK_SPIN(&origmref_a->subref_count, 1);
CHECK_ATOMIC(&origmref_a->subref_count, 1);
if (!atomic_dec_and_test(&origmref_a->subref_count)) {
goto out;
}
@@ -110,7 +110,7 @@ static void _usebuf_mref_endio(struct mars_ref_object *mref)
_usebuf_copy(mref_a, WRITE);
// grab extra reference
CHECK_SPIN(&origmref_a->subref_count, 1);
CHECK_ATOMIC(&origmref_a->subref_count, 1);
atomic_inc(&origmref_a->subref_count);
GENERIC_INPUT_CALL(input, mars_ref_io, mref, WRITE);
@@ -131,8 +131,8 @@ static void _usebuf_mref_endio(struct mars_ref_object *mref)
}
}
CHECK_SPIN(&origmref_a->subref_count, 1);
CHECK_SPIN(&mref->ref_count, 1);
CHECK_ATOMIC(&origmref_a->subref_count, 1);
CHECK_ATOMIC(&mref->ref_count, 1);
status = mref->cb_error;
@@ -167,7 +167,7 @@ static int usebuf_ref_get(struct usebuf_output *output, struct mars_ref_object *
static void usebuf_ref_put(struct usebuf_output *output, struct mars_ref_object *origmref)
{
CHECK_SPIN(&origmref->ref_count, 1);
CHECK_ATOMIC(&origmref->ref_count, 1);
if (!atomic_dec_and_test(&origmref->ref_count)) {
return;
}
@@ -205,10 +205,10 @@ static void usebuf_ref_io(struct usebuf_output *output, struct mars_ref_object *
origmref->cb_error = 0;
// initial refcount: prevent intermediate drops
_CHECK_SPIN(&origmref->ref_count, !=, 1);
_CHECK_ATOMIC(&origmref->ref_count, !=, 1);
atomic_inc(&origmref->ref_count);
_CHECK_SPIN(&origmref_a->subref_count, !=, 0);
_CHECK_ATOMIC(&origmref_a->subref_count, !=, 0);
atomic_set(&origmref_a->subref_count, 1);
start_pos = ((loff_t)bio->bi_sector) << 9; // TODO: make dynamic
@@ -278,7 +278,7 @@ static void usebuf_ref_io(struct usebuf_output *output, struct mars_ref_object *
}
// grab reference for each sub-IO
CHECK_SPIN(&origmref_a->subref_count, 1);
CHECK_ATOMIC(&origmref_a->subref_count, 1);
atomic_inc(&origmref_a->subref_count);
GENERIC_INPUT_CALL(input, mars_ref_io, mref, my_rw);
@@ -325,6 +325,12 @@ static int usebuf_mars_ref_aspect_init_fn(struct generic_aspect *_ini, void *_in
return 0;
}
static void usebuf_mars_ref_aspect_exit_fn(struct generic_aspect *_ini, void *_init_data)
{
struct usebuf_mars_ref_aspect *ini = (void*)_ini;
(void)ini;
}
MARS_MAKE_STATICS(usebuf);
////////////////////// brick constructors / destructors ////////////////////