@@ -52,31 +52,29 @@ static void clk_summary_show_subtree(struct seq_file *s, struct clk *c,
 				     int level)
 {
 	struct clk *child;
-	struct hlist_node *tmp;
 
 	if (!c)
 		return;
 
 	clk_summary_show_one(s, c, level);
 
-	hlist_for_each_entry(child, tmp, &c->children, child_node)
+	hlist_for_each_entry(child, &c->children, child_node)
 		clk_summary_show_subtree(s, child, level + 1);
 }
 
 static int clk_summary_show(struct seq_file *s, void *data)
 {
 	struct clk *c;
-	struct hlist_node *tmp;
 
 	seq_printf(s, "   clock                        enable_cnt  prepare_cnt  rate\n");
 	seq_printf(s, "---------------------------------------------------------------------\n");
 
 	mutex_lock(&prepare_lock);
 
-	hlist_for_each_entry(c, tmp, &clk_root_list, child_node)
+	hlist_for_each_entry(c, &clk_root_list, child_node)
 		clk_summary_show_subtree(s, c, 0);
 
-	hlist_for_each_entry(c, tmp, &clk_orphan_list, child_node)
+	hlist_for_each_entry(c, &clk_orphan_list, child_node)
 		clk_summary_show_subtree(s, c, 0);
 
 	mutex_unlock(&prepare_lock);
@@ -111,14 +109,13 @@ static void clk_dump_one(struct seq_file *s, struct clk *c, int level)
 static void clk_dump_subtree(struct seq_file *s, struct clk *c, int level)
 {
 	struct clk *child;
-	struct hlist_node *tmp;
 
 	if (!c)
 		return;
 
 	clk_dump_one(s, c, level);
 
-	hlist_for_each_entry(child, tmp, &c->children, child_node) {
+	hlist_for_each_entry(child, &c->children, child_node) {
 		seq_printf(s, ",");
 		clk_dump_subtree(s, child, level + 1);
 	}
@@ -129,21 +126,20 @@ static void clk_dump_subtree(struct seq_file *s, struct clk *c, int level)
 static int clk_dump(struct seq_file *s, void *data)
 {
 	struct clk *c;
-	struct hlist_node *tmp;
 	bool first_node = true;
 
 	seq_printf(s, "{");
 
 	mutex_lock(&prepare_lock);
 
-	hlist_for_each_entry(c, tmp, &clk_root_list, child_node) {
+	hlist_for_each_entry(c, &clk_root_list, child_node) {
 		if (!first_node)
 			seq_printf(s, ",");
 		first_node = false;
 		clk_dump_subtree(s, c, 0);
 	}
 
-	hlist_for_each_entry(c, tmp, &clk_orphan_list, child_node) {
+	hlist_for_each_entry(c, &clk_orphan_list, child_node) {
 		seq_printf(s, ",");
 		clk_dump_subtree(s, c, 0);
 	}
@@ -222,7 +218,6 @@ out:
 static int clk_debug_create_subtree(struct clk *clk, struct dentry *pdentry)
 {
 	struct clk *child;
-	struct hlist_node *tmp;
 	int ret = -EINVAL;;
 
 	if (!clk || !pdentry)
@@ -233,7 +228,7 @@ static int clk_debug_create_subtree(struct clk *clk, struct dentry *pdentry)
 	if (ret)
 		goto out;
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node)
+	hlist_for_each_entry(child, &clk->children, child_node)
 		clk_debug_create_subtree(child, clk->dentry);
 
 	ret = 0;
@@ -299,7 +294,6 @@ out:
 static int __init clk_debug_init(void)
 {
 	struct clk *clk;
-	struct hlist_node *tmp;
 	struct dentry *d;
 
 	rootdir = debugfs_create_dir("clk", NULL);
@@ -324,10 +318,10 @@ static int __init clk_debug_init(void)
 
 	mutex_lock(&prepare_lock);
 
-	hlist_for_each_entry(clk, tmp, &clk_root_list, child_node)
+	hlist_for_each_entry(clk, &clk_root_list, child_node)
 		clk_debug_create_subtree(clk, rootdir);
 
-	hlist_for_each_entry(clk, tmp, &clk_orphan_list, child_node)
+	hlist_for_each_entry(clk, &clk_orphan_list, child_node)
 		clk_debug_create_subtree(clk, orphandir);
 
 	inited = 1;
@@ -345,13 +339,12 @@ static inline int clk_debug_register(struct clk *clk) { return 0; }
 static void clk_disable_unused_subtree(struct clk *clk)
 {
 	struct clk *child;
-	struct hlist_node *tmp;
 	unsigned long flags;
 
 	if (!clk)
 		goto out;
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node)
+	hlist_for_each_entry(child, &clk->children, child_node)
 		clk_disable_unused_subtree(child);
 
 	spin_lock_irqsave(&enable_lock, flags);
@@ -384,14 +377,13 @@ out:
 static int clk_disable_unused(void)
 {
 	struct clk *clk;
-	struct hlist_node *tmp;
 
 	mutex_lock(&prepare_lock);
 
-	hlist_for_each_entry(clk, tmp, &clk_root_list, child_node)
+	hlist_for_each_entry(clk, &clk_root_list, child_node)
 		clk_disable_unused_subtree(clk);
 
-	hlist_for_each_entry(clk, tmp, &clk_orphan_list, child_node)
+	hlist_for_each_entry(clk, &clk_orphan_list, child_node)
 		clk_disable_unused_subtree(clk);
 
 	mutex_unlock(&prepare_lock);
@@ -484,12 +476,11 @@ static struct clk *__clk_lookup_subtree(const char *name, struct clk *clk)
 {
 	struct clk *child;
 	struct clk *ret;
-	struct hlist_node *tmp;
 
 	if (!strcmp(clk->name, name))
 		return clk;
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node) {
+	hlist_for_each_entry(child, &clk->children, child_node) {
 		ret = __clk_lookup_subtree(name, child);
 		if (ret)
 			return ret;
@@ -502,20 +493,19 @@ struct clk *__clk_lookup(const char *name)
 {
 	struct clk *root_clk;
 	struct clk *ret;
-	struct hlist_node *tmp;
 
 	if (!name)
 		return NULL;
 
 	/* search the 'proper' clk tree first */
-	hlist_for_each_entry(root_clk, tmp, &clk_root_list, child_node) {
+	hlist_for_each_entry(root_clk, &clk_root_list, child_node) {
 		ret = __clk_lookup_subtree(name, root_clk);
 		if (ret)
 			return ret;
 	}
 
 	/* if not found, then search the orphan tree */
-	hlist_for_each_entry(root_clk, tmp, &clk_orphan_list, child_node) {
+	hlist_for_each_entry(root_clk, &clk_orphan_list, child_node) {
 		ret = __clk_lookup_subtree(name, root_clk);
 		if (ret)
 			return ret;
@@ -812,7 +802,6 @@ static void __clk_recalc_rates(struct clk *clk, unsigned long msg)
 {
 	unsigned long old_rate;
 	unsigned long parent_rate = 0;
-	struct hlist_node *tmp;
 	struct clk *child;
 
 	old_rate = clk->rate;
@@ -832,7 +821,7 @@ static void __clk_recalc_rates(struct clk *clk, unsigned long msg)
 	if (clk->notifier_count && msg)
 		__clk_notify(clk, msg, old_rate, clk->rate);
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node)
+	hlist_for_each_entry(child, &clk->children, child_node)
 		__clk_recalc_rates(child, msg);
 }
 
@@ -878,7 +867,6 @@ EXPORT_SYMBOL_GPL(clk_get_rate);
  */
 static int __clk_speculate_rates(struct clk *clk, unsigned long parent_rate)
 {
-	struct hlist_node *tmp;
 	struct clk *child;
 	unsigned long new_rate;
 	int ret = NOTIFY_DONE;
@@ -895,7 +883,7 @@ static int __clk_speculate_rates(struct clk *clk, unsigned long parent_rate)
 	if (ret == NOTIFY_BAD)
 		goto out;
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node) {
+	hlist_for_each_entry(child, &clk->children, child_node) {
 		ret = __clk_speculate_rates(child, new_rate);
 		if (ret == NOTIFY_BAD)
 			break;
@@ -908,11 +896,10 @@ out:
 static void clk_calc_subtree(struct clk *clk, unsigned long new_rate)
 {
 	struct clk *child;
-	struct hlist_node *tmp;
 
 	clk->new_rate = new_rate;
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node) {
+	hlist_for_each_entry(child, &clk->children, child_node) {
 		if (child->ops->recalc_rate)
 			child->new_rate = child->ops->recalc_rate(child->hw, new_rate);
 		else
@@ -983,7 +970,6 @@ out:
  */
 static struct clk *clk_propagate_rate_change(struct clk *clk, unsigned long event)
 {
-	struct hlist_node *tmp;
 	struct clk *child, *fail_clk = NULL;
 	int ret = NOTIFY_DONE;
 
@@ -996,7 +982,7 @@ static struct clk *clk_propagate_rate_change(struct clk *clk, unsigned long even
 			fail_clk = clk;
 	}
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node) {
+	hlist_for_each_entry(child, &clk->children, child_node) {
 		clk = clk_propagate_rate_change(child, event);
 		if (clk)
 			fail_clk = clk;
@@ -1014,7 +1000,6 @@ static void clk_change_rate(struct clk *clk)
 	struct clk *child;
 	unsigned long old_rate;
 	unsigned long best_parent_rate = 0;
-	struct hlist_node *tmp;
 
 	old_rate = clk->rate;
 
@@ -1032,7 +1017,7 @@ static void clk_change_rate(struct clk *clk)
 	if (clk->notifier_count && old_rate != clk->rate)
 		__clk_notify(clk, POST_RATE_CHANGE, old_rate, clk->rate);
 
-	hlist_for_each_entry(child, tmp, &clk->children, child_node)
+	hlist_for_each_entry(child, &clk->children, child_node)
 		clk_change_rate(child);
 }
 
@@ -1348,7 +1333,7 @@ int __clk_init(struct device *dev, struct clk *clk)
 {
 	int i, ret = 0;
 	struct clk *orphan;
-	struct hlist_node *tmp, *tmp2;
+	struct hlist_node *tmp2;
 
 	if (!clk)
 		return -EINVAL;
@@ -1448,7 +1433,7 @@ int __clk_init(struct device *dev, struct clk *clk)
 	 * walk the list of orphan clocks and reparent any that are children of
 	 * this clock
 	 */
-	hlist_for_each_entry_safe(orphan, tmp, tmp2, &clk_orphan_list, child_node) {
+	hlist_for_each_entry_safe(orphan, tmp2, &clk_orphan_list, child_node) {
 		if (orphan->ops->get_parent) {
 			i = orphan->ops->get_parent(orphan->hw);
 			if (!strcmp(clk->name, orphan->parent_names[i]))