/* net/gen_stats.h - dumping and estimating of generic network statistics */
#ifndef __NET_GEN_STATS_H
|
|
|
|
#define __NET_GEN_STATS_H
|
|
|
|
|
|
|
|
#include <linux/gen_stats.h>
|
|
|
|
#include <linux/socket.h>
|
|
|
|
#include <linux/rtnetlink.h>
|
|
|
|
#include <linux/pkt_sched.h>
|
|
|
|
|
/*
 * Per-CPU instance of the basic statistics counters.
 *
 * NOTE(review): syncp appears to be the u64_stats sequence counter that
 * readers use to get a consistent snapshot of bstats on 32-bit SMP
 * systems — confirm against the u64_stats_sync API.
 */
struct gnet_stats_basic_cpu {
	struct gnet_stats_basic_packed bstats;	/* counters (layout in <linux/gen_stats.h>) */
	struct u64_stats_sync syncp;		/* guards reads of bstats */
};
/*
 * Dump handle: state carried between gnet_stats_start_copy*(), the
 * gnet_stats_copy_*() calls and gnet_stats_finish_copy() while
 * statistics are serialized into a netlink message.
 */
struct gnet_dump {
	spinlock_t * lock;	/* lock passed to gnet_stats_start_copy*() */
	struct sk_buff * skb;	/* message the statistics are appended to */
	struct nlattr * tail;	/* presumably marks the open stats attribute — verify in net/core/gen_stats.c */

	/* Backward compatibility */
	int compat_tc_stats;	/* attribute type for the legacy tc_stats dump (0 = none) — TODO confirm */
	int compat_xstats;	/* attribute type for the legacy xstats dump (0 = none) — TODO confirm */
	void * xstats;		/* application-specific statistics blob */
	int xstats_len;		/* length of xstats in bytes */
	struct tc_stats tc_stats;	/* accumulated legacy counters */
};
/*
 * Begin a statistics dump into skb under a nested attribute of the given
 * type, initializing the dump handle d.
 * NOTE(review): lock is presumably acquired here and held until
 * gnet_stats_finish_copy() — confirm in net/core/gen_stats.c.
 */
int gnet_stats_start_copy(struct sk_buff *skb, int type, spinlock_t *lock,
			  struct gnet_dump *d);

/*
 * As gnet_stats_start_copy(), but additionally arranges for the
 * backward-compatible tc_stats/xstats attributes (types tc_stats_type and
 * xstats_type) to be emitted for old userspace.
 */
int gnet_stats_start_copy_compat(struct sk_buff *skb, int type,
				 int tc_stats_type, int xstats_type,
				 spinlock_t *lock, struct gnet_dump *d);
/*
 * Append the basic counters to the dump d, taken from the per-CPU
 * counters cpu or the plain counters b.
 * NOTE(review): presumably cpu takes precedence when non-NULL — confirm
 * against the implementation.
 */
int gnet_stats_copy_basic(struct gnet_dump *d,
			  struct gnet_stats_basic_cpu __percpu *cpu,
			  struct gnet_stats_basic_packed *b);

/*
 * Same source selection as gnet_stats_copy_basic(), but stores the result
 * into *bstats instead of appending it to a dump.
 */
void __gnet_stats_copy_basic(struct gnet_stats_basic_packed *bstats,
			     struct gnet_stats_basic_cpu __percpu *cpu,
			     struct gnet_stats_basic_packed *b);
/*
 * Append the rate estimator statistics r to the dump d; b identifies the
 * basic counters the estimator is attached to.
 */
int gnet_stats_copy_rate_est(struct gnet_dump *d,
			     const struct gnet_stats_basic_packed *b,
			     struct gnet_stats_rate_est64 *r);

/*
 * Append queue statistics (drops, overlimits, backlog) and the queue
 * length qlen to the dump d, taken from the per-CPU counters cpu_q or the
 * plain counters q.
 */
int gnet_stats_copy_queue(struct gnet_dump *d,
			  struct gnet_stats_queue __percpu *cpu_q,
			  struct gnet_stats_queue *q, __u32 qlen);
/* Append len bytes of application-specific statistics st to the dump d. */
int gnet_stats_copy_app(struct gnet_dump *d, void *st, int len);

/*
 * Finalize the dump started by gnet_stats_start_copy*(): close the nested
 * attribute and release whatever state the start call set up.
 */
int gnet_stats_finish_copy(struct gnet_dump *d);
/*
 * Create a rate estimator for the counters bstats/cpu_bstats, reporting
 * into rate_est. opt carries the estimator configuration (netlink
 * attribute); stats_lock protects the counters while they are sampled.
 * NOTE(review): exact locking and timer behavior live in
 * net/core/gen_estimator.c — confirm there before relying on details.
 */
int gen_new_estimator(struct gnet_stats_basic_packed *bstats,
		      struct gnet_stats_basic_cpu __percpu *cpu_bstats,
		      struct gnet_stats_rate_est64 *rate_est,
		      spinlock_t *stats_lock, struct nlattr *opt);

/* Remove the estimator identified by the bstats/rate_est pair. */
void gen_kill_estimator(struct gnet_stats_basic_packed *bstats,
			struct gnet_stats_rate_est64 *rate_est);

/*
 * Replace an existing estimator for bstats/rate_est with one built from
 * the new configuration opt.
 */
int gen_replace_estimator(struct gnet_stats_basic_packed *bstats,
			  struct gnet_stats_basic_cpu __percpu *cpu_bstats,
			  struct gnet_stats_rate_est64 *rate_est,
			  spinlock_t *stats_lock, struct nlattr *opt);

/* Test whether an estimator is active for the bstats/rate_est pair. */
bool gen_estimator_active(const struct gnet_stats_basic_packed *bstats,
			  const struct gnet_stats_rate_est64 *rate_est);

#endif