[alsa-devel] [PATCH v2 0/3] ASoC: add audio card creation from graph of ports in DT
This patchset adds a way to build audio cards from the description of a graph of ports in a devicetree.
v2: - move the soc-core and dt-card from the patchset
      "ASoC: tda998x: add a codec to the HDMI transmitter"
    - add DT documentation (Mark Brown)
Jean-Francois Moine (3):
  ASoC: core: export snd_soc_get_dai_name
  Documentation: of: Document audio graph bindings
  ASoC: add generic dt-card support
 .../devicetree/bindings/sound/audio-interfaces.txt | 101 ++++++++
 include/sound/soc.h                                |   2 +
 sound/soc/generic/Kconfig                          |   2 +
 sound/soc/generic/Makefile                         |   2 +
 sound/soc/generic/dt-card.c                        | 275 +++++++++++++++++++++
 sound/soc/soc-core.c                               |   5 +-
 6 files changed, 385 insertions(+), 2 deletions(-)
 create mode 100644 Documentation/devicetree/bindings/sound/audio-interfaces.txt
 create mode 100644 sound/soc/generic/dt-card.c
snd_soc_get_dai_name() may be used to define a sound card with a different syntax from that of the simple-card.
Signed-off-by: Jean-Francois Moine <moinejf@free.fr>
---
 include/sound/soc.h  | 2 ++
 sound/soc/soc-core.c | 5 +++--
 2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/include/sound/soc.h b/include/sound/soc.h
index edd4a0a..7e783d6 100644
--- a/include/sound/soc.h
+++ b/include/sound/soc.h
@@ -1502,6 +1502,8 @@ int snd_soc_of_get_dai_name(struct device_node *of_node,
 int snd_soc_of_get_dai_link_codecs(struct device *dev,
				    struct device_node *of_node,
				    struct snd_soc_dai_link *dai_link);
+int snd_soc_get_dai_name(struct of_phandle_args *args,
+			 const char **dai_name);
 
 #include <sound/soc-dai.h>
diff --git a/sound/soc/soc-core.c b/sound/soc/soc-core.c
index ededb97..6a782ca 100644
--- a/sound/soc/soc-core.c
+++ b/sound/soc/soc-core.c
@@ -3418,8 +3418,8 @@ unsigned int snd_soc_of_parse_daifmt(struct device_node *np,
 }
 EXPORT_SYMBOL_GPL(snd_soc_of_parse_daifmt);
 
-static int snd_soc_get_dai_name(struct of_phandle_args *args,
-				const char **dai_name)
+int snd_soc_get_dai_name(struct of_phandle_args *args,
+			 const char **dai_name)
 {
 	struct snd_soc_component *pos;
 	int ret = -EPROBE_DEFER;
@@ -3465,6 +3465,7 @@ static int snd_soc_get_dai_name(struct of_phandle_args *args,
 	mutex_unlock(&client_mutex);
 	return ret;
 }
+EXPORT_SYMBOL_GPL(snd_soc_get_dai_name);
 
 int snd_soc_of_get_dai_name(struct device_node *of_node,
 			    const char **dai_name)
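As an illustration, here is a minimal sketch (not part of the patch) of how a caller outside soc-core might use the newly exported helper, assuming the usual one-cell DAI specifier; the function and variable names are made up:

#include <linux/of.h>
#include <sound/soc.h>

/* resolve the name of the first DAI of a component from its device node */
static int example_resolve_dai(struct device_node *component_np,
			       const char **dai_name)
{
	struct of_phandle_args args = {
		.np = component_np,
		.args_count = 1,
		.args = { 0 },		/* DAI index 0 */
	};

	return snd_soc_get_dai_name(&args, dai_name);
}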
This patch adds a generic way to create audio cards from a graph of ports defined in a DT.
The dt-card devices are created by the audio controllers, with the controller itself as the root of the graph, and the sound cards are built according to the parameters found in the tree.
Signed-off-by: Jean-Francois Moine <moinejf@free.fr>
---
 sound/soc/generic/Kconfig   |   2 +
 sound/soc/generic/Makefile  |   2 +
 sound/soc/generic/dt-card.c | 275 ++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 279 insertions(+)
 create mode 100644 sound/soc/generic/dt-card.c
diff --git a/sound/soc/generic/Kconfig b/sound/soc/generic/Kconfig
index 610f612..9c5e1e2 100644
--- a/sound/soc/generic/Kconfig
+++ b/sound/soc/generic/Kconfig
@@ -2,3 +2,5 @@ config SND_SIMPLE_CARD
 	tristate "ASoC Simple sound card support"
 	help
 	  This option enables generic simple sound card support
+config SND_DT_CARD
+	tristate
diff --git a/sound/soc/generic/Makefile b/sound/soc/generic/Makefile
index 9c3b246..56834a9 100644
--- a/sound/soc/generic/Makefile
+++ b/sound/soc/generic/Makefile
@@ -1,3 +1,5 @@
 snd-soc-simple-card-objs := simple-card.o
+snd-soc-dt-card-objs := dt-card.o
 
 obj-$(CONFIG_SND_SIMPLE_CARD) += snd-soc-simple-card.o
+obj-$(CONFIG_SND_DT_CARD) += snd-soc-dt-card.o
diff --git a/sound/soc/generic/dt-card.c b/sound/soc/generic/dt-card.c
new file mode 100644
index 0000000..6a5de2f
--- /dev/null
+++ b/sound/soc/generic/dt-card.c
@@ -0,0 +1,275 @@
+/*
+ * ALSA SoC DT based sound card support
+ *
+ * Copyright (C) 2015 Jean-Francois Moine
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/module.h>
+#include <sound/soc.h>
+#include <linux/of.h>
+#include <linux/of_device.h>
+
+/* check if a node is an audio port */
+static int asoc_dt_card_is_audio_port(struct device_node *of_port)
+{
+	const char *name;
+	int ret;
+
+	if (!of_port->name ||
+	    of_node_cmp(of_port->name, "port") != 0)
+		return 0;
+	ret = of_property_read_string(of_port,
+				      "port-type",
+				      &name);
+	if (!ret &&
+	    (strcmp(name, "i2s") == 0 ||
+	     strcmp(name, "spdif") == 0))
+		return 1;
+	return 0;
+}
+
+/*
+ * Get the DAI number from the DT by counting the audio ports
+ * of the remote device node (codec).
+ */
+static int asoc_dt_card_get_dai_number(struct device_node *of_codec,
+				struct device_node *of_remote_endpoint)
+{
+	struct device_node *of_port, *of_endpoint;
+	int ndai;
+
+	ndai = 0;
+	for_each_child_of_node(of_codec, of_port) {
+		if (!asoc_dt_card_is_audio_port(of_port))
+			continue;
+		for_each_child_of_node(of_port, of_endpoint) {
+			if (!of_endpoint->name ||
+			    of_node_cmp(of_endpoint->name, "endpoint") != 0)
+				continue;
+			if (of_endpoint == of_remote_endpoint) {
+				of_node_put(of_port);
+				of_node_put(of_endpoint);
+				return ndai;
+			}
+		}
+		ndai++;
+	}
+	return 0;		/* should never be reached */
+}
+
+/*
+ * Parse a graph of audio ports
+ * @dev: Card device
+ * @of_cpu: Device node of the audio controller
+ * @card: Card definition
+ *
+ * Builds the DAI links of the card from the DT graph of audio ports
+ * starting from the audio controller.
+ * It does not handle the port groups.
+ * The CODEC device nodes in the DAI links must be dereferenced by the caller.
+ *
+ * Returns the number of DAI links or (< 0) on error
+ */
+static int asoc_dt_card_of_parse_graph(struct device *dev,
+				struct device_node *of_cpu,
+				struct snd_soc_card *card)
+{
+	struct device_node *of_codec, *of_port, *of_endpoint,
+				*of_remote_endpoint;
+	struct snd_soc_dai_link *link;
+	struct snd_soc_dai_link_component *component;
+	struct of_phandle_args args, args2;
+	int ret, ilink, icodec, nlinks, ncodecs;
+
+	/* count the number of DAI links */
+	nlinks = 0;
+	for_each_child_of_node(of_cpu, of_port) {
+		if (asoc_dt_card_is_audio_port(of_port))
+			nlinks++;
+	}
+
+	/* allocate the DAI link array */
+	link = devm_kzalloc(dev, sizeof(*link) * nlinks, GFP_KERNEL);
+	if (!link)
+		return -ENOMEM;
+	card->dai_link = link;
+
+	/* build the DAI links */
+	ilink = 0;
+	args.np = of_cpu;
+	args.args_count = 1;
+	for_each_child_of_node(of_cpu, of_port) {
+		if (!asoc_dt_card_is_audio_port(of_port))
+			continue;
+
+		link->platform_of_node =
+			link->cpu_of_node = of_cpu;
+		args.args[0] = ilink;
+		ret = snd_soc_get_dai_name(&args, &link->cpu_dai_name);
+		if (ret) {
+			dev_err(dev, "no CPU DAI name for link %d!\n",
+				ilink);
+			continue;
+		}
+
+		/* count the number of codecs of this DAI link */
+		ncodecs = 0;
+		for_each_child_of_node(of_port, of_endpoint) {
+			if (of_parse_phandle(of_endpoint,
+					     "remote-endpoint", 0))
+				ncodecs++;
+		}
+		if (ncodecs == 0)
+			continue;
+		component = devm_kzalloc(dev,
+					 sizeof(*component) * ncodecs,
+					 GFP_KERNEL);
+		if (!component)
+			return -ENOMEM;
+		link->codecs = component;
+
+		icodec = 0;
+		args2.args_count = 1;
+		for_each_child_of_node(of_port, of_endpoint) {
+			of_remote_endpoint = of_parse_phandle(of_endpoint,
+						"remote-endpoint", 0);
+			if (!of_remote_endpoint)
+				continue;
+			component->of_node = of_codec =
+				of_remote_endpoint->parent->parent;
+			args2.np = of_codec;
+			args2.args[0] = asoc_dt_card_get_dai_number(of_codec,
+						of_remote_endpoint);
+			ret = snd_soc_get_dai_name(&args2,
+						   &component->dai_name);
+			if (ret) {
+				if (ret == -EPROBE_DEFER) {
+					card->num_links = ilink + 1;
+					link->num_codecs = icodec;
+					return ret;
+				}
+				dev_err(dev,
+					"no CODEC DAI name for link %d\n",
+					ilink);
+				continue;
+			}
+			of_node_get(of_codec);
+
+			icodec++;
+			if (icodec >= ncodecs)
+				break;
+			component++;
+		}
+		if (icodec == 0)
+			continue;
+		link->num_codecs = icodec;
+
+		ilink++;
+		if (ilink >= nlinks)
+			break;
+		link++;
+	}
+	card->num_links = ilink;
+
+	return ilink;
+}
+
+static void asoc_dt_card_unref(struct platform_device *pdev)
+{
+	struct snd_soc_card *card = platform_get_drvdata(pdev);
+	struct snd_soc_dai_link *link;
+	int nlinks, ncodecs;
+
+	if (card) {
+		for (nlinks = 0, link = card->dai_link;
+		     nlinks < card->num_links;
+		     nlinks++, link++) {
+			for (ncodecs = 0;
+			     ncodecs < link->num_codecs;
+			     ncodecs++)
+				of_node_put(card->dai_link->codecs[ncodecs].of_node);
+		}
+	}
+}
+
+/*
+ * The platform data contains the pointer to the device node
+ * which starts the description of the graph of the audio ports,
+ * This device node is usually the audio controller.
+ */
+static int asoc_dt_card_probe(struct platform_device *pdev)
+{
+	struct device_node **p_np = pdev->dev.platform_data;
+	struct device_node *of_cpu = *p_np;
+	struct snd_soc_card *card;
+	struct snd_soc_dai_link *link;
+	char *name;
+	int ret, i;
+
+	card = devm_kzalloc(&pdev->dev, sizeof(*card), GFP_KERNEL);
+	if (!card)
+		return -ENOMEM;
+	ret = asoc_dt_card_of_parse_graph(&pdev->dev, of_cpu, card);
+	if (ret < 0)
+		goto err;
+
+	/* fill the remaining values of the card */
+	card->owner = THIS_MODULE;
+	card->dev = &pdev->dev;
+	card->name = "DT-card";
+	for (i = 0, link = card->dai_link;
+	     i < card->num_links;
+	     i++, link++) {
+		name = devm_kzalloc(&pdev->dev,
+				    strlen(link->cpu_dai_name) +
+					strlen(link->codecs[0].dai_name) +
+					2,
+				    GFP_KERNEL);
+		if (!name) {
+			ret = -ENOMEM;
+			goto err;
+		}
+		sprintf(name, "%s-%s", link->cpu_dai_name,
+			link->codecs[0].dai_name);
+		link->name = link->stream_name = name;
+	}
+
+	card->dai_link->dai_fmt =
+		snd_soc_of_parse_daifmt(of_cpu, "dt-audio-card,",
+					NULL, NULL) &
+				~SND_SOC_DAIFMT_MASTER_MASK;
+
+	ret = devm_snd_soc_register_card(&pdev->dev, card);
+	if (ret >= 0)
+		return ret;
+
+err:
+	asoc_dt_card_unref(pdev);
+	return ret;
+}
+
+static int asoc_dt_card_remove(struct platform_device *pdev)
+{
+	asoc_dt_card_unref(pdev);
+	snd_soc_unregister_card(platform_get_drvdata(pdev));
+	return 0;
+}
+
+static struct platform_driver asoc_dt_card = {
+	.driver = {
+		.name = "asoc-dt-card",
+	},
+	.probe = asoc_dt_card_probe,
+	.remove = asoc_dt_card_remove,
+};
+
+module_platform_driver(asoc_dt_card);
+
+MODULE_ALIAS("platform:asoc-dt-card");
+MODULE_DESCRIPTION("ASoC DT Sound Card");
+MODULE_AUTHOR("Jean-Francois Moine <moinejf@free.fr>");
+MODULE_LICENSE("GPL");
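For reference, a minimal sketch of how an audio controller driver might create this dt-card device, with the controller node as the root of the graph; the probe function name is illustrative, and platform_device_register_data() copies the node pointer, matching the double indirection read back in asoc_dt_card_probe():

#include <linux/err.h>
#include <linux/of.h>
#include <linux/platform_device.h>

static int example_controller_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct platform_device *card_pdev;

	/* ... register the controller DAIs first ... */

	/* hand the graph root to the dt-card builder as platform data */
	card_pdev = platform_device_register_data(&pdev->dev,
				"asoc-dt-card", PLATFORM_DEVID_AUTO,
				&np, sizeof(np));
	return PTR_ERR_OR_ZERO(card_pdev);
}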
On Wed, 21 Jan 2015 21:14:07 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
[...]
- card->dai_link->dai_fmt =
snd_soc_of_parse_daifmt(of_cpu, "dt-audio-card,",
NULL, NULL) &
~SND_SOC_DAIFMT_MASTER_MASK;
This one does not seem to be in the bindings documentation.
Sorry, I forgot to remove it from the patch.
BTW, the graph of ports should also contain pieces of the audio-specific hardware information, such as the ones found in the simple-card (clock, GPIO, ...). This information could be written as generic device node properties, i.e. without any prefix.
I was also wondering about some of these properties, such as widgets and routing. They seem to be software information and Linux-specific. Must these properties appear in the DTs?
On Thu, Jan 22, 2015 at 09:07:23AM +0100, Jean-Francois Moine wrote:
I was also wondering about some of these properties, such as widgets and routing. They seem to be software information and Linux-specific. Must these properties appear in the DTs?
What's Linux specific about them? The widgets should all represent physical things that can be pointed at on the board and the routing reflects how the components are hooked up.
On 01/22/2015 09:07 AM, Jean-Francois Moine wrote:
On Wed, 21 Jan 2015 21:14:07 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
[...]
- card->dai_link->dai_fmt =
snd_soc_of_parse_daifmt(of_cpu, "dt-audio-card,",
NULL, NULL) &
~SND_SOC_DAIFMT_MASTER_MASK;
This one does not seem to be in the bindings documentation.
Sorry, I forgot to remove it from the patch.
Ah, too bad this was the part I was most interested in. I think that using the generic OF graph framework as a unified way for expressing non-control links is a good idea, whether it be for audio, video or something else.
But I think there are some open questions that need to be addressed when coming up with a specification for audio, so we do not have to write yet another incompatible DT spec in 3 months' time.
One issue is how to deal with multi-point-to-multi-point links. I2S/TDM is a bus and can have more than one reader/writer.
The second issue is how to describe the clock and frame master relationships. Multiple different buses can share the same clock and frame generator. E.g. typically the capture and playback stream are linked in this way.
How are we going to handle bus-specific properties? Properties which are not a property of either of the endpoints on the link, but of the link itself.
BTW, the graph of ports should also contain pieces of the audio-specific hardware information, such as the ones found in the simple-card (clock, GPIO, ...). This information could be written as generic device node properties, i.e. without any prefix.
I was also wondering about some of these properties, such as widgets and routing. They seem to be software information and Linux-specific. Must these properties appear in the DTs?
Well, last time I checked the speaker on my board was hardware, not software, and wasn't Linux-specific either ;) Those widgets and routing represent the (typically analog) audio fabric on the board and are part of the hardware description. This is not even ASoC or devicetree specific; e.g. HDA uses a similar concept where the BIOS provides a description of which pins of the audio CODEC are connected to which speaker, microphone, etc. And especially on embedded boards the audio fabric can become quite complex.
Your example is a relatively simple one where you do not have any additional audio fabric on the board itself.
- Lars
On Thu, 22 Jan 2015 20:25:39 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
On 01/22/2015 09:07 AM, Jean-Francois Moine wrote:
On Wed, 21 Jan 2015 21:14:07 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
[...]
- card->dai_link->dai_fmt =
snd_soc_of_parse_daifmt(of_cpu, "dt-audio-card,",
NULL, NULL) &
~SND_SOC_DAIFMT_MASTER_MASK;
This one does not seem to be in the bindings documentation.
Sorry, I forgot to remove it from the patch.
Ah, too bad this was the part I was most interested in. I think that using
This code was a leftover from the simple-card.
Setting the audio format on the CPU side of the link is of no interest when this format is the same for all CODECs. In this case, the audio controller may set this format itself when it creates its DAIs, and then the audio format is an audio controller property.
On the other hand, when the format depends on the remote endpoints, it should appear in the graph in the CODEC ports.
the generic OF graph framework as a unified way for expressing non-control links is a good idea, whether it be for audio, video or something else.
We discussed the graph of ports last year http://mailman.alsa-project.org/pipermail/alsa-devel/2014-January/070634.htm... but some pieces of software were lacking, such as the multi-codec links.
But I think there are some open questions that need to be addressed when coming up with a specification for audio, so we do not have to write yet another incompatible DT spec in 3 months' time.
The DT should describe the hardware, and the simple-card mixes hardware and software. For example, the kirkwood controller may create 2 CPU DAIs. With the simple-card, the DT contains a number to reference these DAIs (for example, implicitly, <audio1 0> references the I2S output). So, what if the controller creates only one DAI, or what if the FreeBSD/OpenBSD/.. driver does not set the same references to these DAIs? The graph of ports fixes this problem.
More: a simple audio card may easily be created from a graph of ports as the simple-card does, but by the audio-controller (sorry, I also forgot the kirkwood patch for this in my previous patch request). In case of complex cards, the links and properties of this graph may also be used by board specific card devices.
One issue is how to deal with multi-point-to-multi-point links. I2S/TDM is a bus and can have more than one reader/writer.
The second issue is how to describe the clock and frame master relationships. Multiple different buses can share the same clock and frame generator. E.g. typically the capture and playback stream are linked in this way.
The ports and endpoints may contain properties to describe these configurations. Complex cases should be handled by specific card builders.
How are we going to handle bus-specific properties? Properties which are not a property of either of the endpoints on the link, but of the link itself.
This is already the case for the bus types of the kirkwood controller, I2S or S/PDIF. Such properties may appear in either local or remote port, or in both.
BTW, the graph of ports should also contain pieces of the audio-specific hardware information, such as the ones found in the simple-card (clock, GPIO, ...). This information could be written as generic device node properties, i.e. without any prefix.
I was also wondering about some of these properties, such as widgets and routing. They seem to be software information and Linux-specific. Must these properties appear in the DTs?
Well, last time I checked the speaker on my board was hardware, not software, and wasn't Linux-specific either ;) Those widgets and routing represent the (typically analog) audio fabric on the board and are part of the hardware description. This is not even ASoC or devicetree specific; e.g. HDA uses a similar concept where the BIOS provides a description of which pins of the audio CODEC are connected to which speaker, microphone, etc. And especially on embedded boards the audio fabric can become quite complex.
OK. I looked at whether the widgets and routes could also be described in a graph, but it complicates the syntax. So, this information could have the same syntax as in the simple-card.
On the other hand, where would this information appear in the graph? As I understood, on card creation, the widgets and routes, which appear at the card level, redefine the CPU and CODEC DAI definitions.
With a DT graph, each CPU/CODEC would know exactly the widgets and routes it has to define.
Your example is a relatively simple one where you do not have any additional audio fabric on the board itself.
Right, and that's why I'd be glad to quickly have something in the kernel. More properties could be added later as requests come in.
On 01/23/2015 01:15 PM, Jean-Francois Moine wrote: [...]
The DT should describe the hardware, and the simple-card mixes hardware and software. For example, the kirkwood controller may create 2 CPU DAIs. With the simple-card, the DT contains a number to reference these DAIs (for example, implicitly, <audio1 0> references the I2S output). So, what if the controller creates only one DAI, or what if the FreeBSD/OpenBSD/.. driver does not set the same references to these DAIs? The graph of ports fixes this problem.
Even with the simple-card bindings there are no software specific bits. The DAI that is referenced in your example is the physical DAI as it exists in the hardware. Which DAI maps to which specifier is defined in the devicetree bindings definition for the hardware unit.
More: a simple audio card may easily be created from a graph of ports as the simple-card does, but by the audio-controller (sorry, I also forgot the kirkwood patch for this in my previous patch request). In case of complex cards, the links and properties of this graph may also be used by board specific card devices.
One issue is how to deal with multi-point-to-multi-point links. I2S/TDM is a bus and can have more than one reader/writer.
The second issue is how to describe the clock and frame master relationships. Multiple different buses can share the same clock and frame generator. E.g. typically the capture and playback stream are linked in this way.
The ports and endpoints may contain properties to describe these configurations. Complex cases should be handled by specific card builders.
Could you describe in detail what a card builder is and how to decide when and how a card builder is executed?
How are we going to handle bus-specific properties? Properties which are not a property of either of the endpoints on the link, but of the link itself.
This is already the case for the bus types of the kirkwood controller, I2S or S/PDIF. Such properties may appear in either local or remote port, or in both.
BTW, the graph of ports should also contain pieces of the audio-specific hardware information, such as the ones found in the simple-card (clock, GPIO, ...). This information could be written as generic device node properties, i.e. without any prefix.
I was also wondering about some of these properties, such as widgets and routing. They seem to be software information and Linux-specific. Must these properties appear in the DTs?
Well, last time I checked the speaker on my board was hardware, not software, and wasn't Linux-specific either ;) Those widgets and routing represent the (typically analog) audio fabric on the board and are part of the hardware description. This is not even ASoC or devicetree specific; e.g. HDA uses a similar concept where the BIOS provides a description of which pins of the audio CODEC are connected to which speaker, microphone, etc. And especially on embedded boards the audio fabric can become quite complex.
OK. I looked at whether the widgets and routes could also be described in a graph, but it complicates the syntax. So, this information could have the same syntax as in the simple-card.
Yeah, using the graph syntax for the analog pins would result in a lot of boilerplate.
On the other hand, where would this information appear in the graph? As I understood, on card creation, the widgets and routes, which appear at the card level, redefine the CPU and CODEC DAI definitions.
What do you mean by "redefine the CPU and CODEC DAI definitions"?
With a DT graph, each CPU/CODEC would know exactly the widgets and routes it has to define.
Which widgets/routes do you mean?
Your example is a relatively simple one where you do not have any additional audio fabric on the board itself.
Right, and that's why I'd be glad to quickly have something in the kernel. More properties could be added later as requests come in.
I'd agree if this was some kind of kernel internal stuff, but this is creating ABI and we have to maintain it forever. Rushing this in without proper discussion and consideration of the more complex use-cases is in my opinion not a good idea.
- Lars
On Fri, Jan 23, 2015 at 02:56:04PM +0100, Lars-Peter Clausen wrote:
On 01/23/2015 01:15 PM, Jean-Francois Moine wrote:
Your example is a relatively simple one where you do not have any additional audio fabric on the board itself.
Right, and that's why I'd be glad to quickly have something in the kernel. More properties could be added later as requests come in.
I'd agree if this was some kind of kernel internal stuff, but this is creating ABI and we have to maintain it forever. Rushing this in without proper discussion and consideration of the more complex use-cases is in my opinion not a good idea.
Just to repeat myself again: *please* try to provide more explanation for your commits, the concerns Lars is raising above are among the things that I'd have expected to be covered in a high quality changelog. There is no explanation at all in the changelog regarding the goal here, just statements that this is a graph based card without an actual card node.
On Fri, 23 Jan 2015 14:56:04 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
On 01/23/2015 01:15 PM, Jean-Francois Moine wrote: [...]
The DT should describe the hardware, and the simple-card mixes hardware and software. For example, the kirkwood controller may create 2 CPU DAIs. With the simple-card, the DT contains a number to reference these DAIs (for example, implicitly, <audio1 0> references the I2S output). So, what if the controller creates only one DAI, or what if the FreeBSD/OpenBSD/.. driver does not set the same references to these DAIs? The graph of ports fixes this problem.
Even with the simple-card bindings there are no software specific bits. The DAI that is referenced in your example is the physical DAI as it exists in the hardware. Which DAI maps to which specifier is defined in the devicetree bindings definition for the hardware unit.
There is no first or second output in the kirkwood controller; there are I2S and S/PDIF outputs. Their hardware controls are done by bits in the different registers. Then, the driver may define its DAIs in any order and assign any DAI ID to these DAIs. So, for a given DAI, there is no relation between its type (I2S or S/PDIF), its index in the DAI driver (= ASoC component index) and its ID.
In the simple-card DT, the CPU DAI (as the CODEC DAI) is referenced by a phandle (the audio-controller) and a number which should identify the DAI. This number is translated to a DAI name thanks to the function snd_soc_get_dai_name(). This function scans the components and returns the name of the DAI (component) supported by the audio controller with the searched number equal to the component index (there is no 'xlate' function).
So, with the simple-card, the CPU or CODEC DAI binding (phandle + number) identifies what the software has put in its table, but not a hardware entity.
More: a simple audio card may easily be created from a graph of ports as the simple-card does, but by the audio-controller (sorry, I also forgot the kirkwood patch for this in my previous patch request). In case of complex cards, the links and properties of this graph may also be used by board specific card devices.
One issue is how to deal with multi-point-to-multi-point links. I2S/TDM is a bus and can have more than one reader/writer.
The second issue is how to describe the clock and frame master relationships. Multiple different buses can share the same clock and frame generator. E.g. typically the capture and playback stream are linked in this way.
The ports and endpoints may contain properties to describe these configurations. Complex cases should be handled by specific card builders.
Could you describe in detail what a card builder is and how to decide when and how a card builder is executed?
A card builder is a device which
- scans the graph of ports,
- fills the struct snd_soc_card according to the links between the ports and their properties,
- and, eventually, calls snd_soc_register_card().
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. Its creation is done by an audio controller.
For complex cards, a card builder would be the same as an actual card driver, but with the capability to know about the board thanks to the graph of ports. Its creation could be done by a compatible device in the DT (as is done today) or by the unique or a 'master' audio controller.
A card builder is subject to a PROBE_DEFER after calling snd_soc_register_card(), so, when it is created by an audio controller, the latter should already have created its own resources.
An audio controller does the following:
- initialize the hardware and software,
- create the DAIs from:
  - the graph of ports or
  - default values
- register these DAIs and
- if the DAIs were created by a graph of ports, create the card builder.
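As a side note, the walk done by such a card builder could probably also be written with the generic of_graph helpers instead of iterating over the child nodes by hand; a minimal sketch, with illustrative names and no error handling:

#include <linux/of.h>
#include <linux/of_graph.h>
#include <linux/printk.h>

/* list every link below a root node and its remote (CODEC) side */
static void example_walk_graph(struct device_node *root)
{
	struct device_node *ep = NULL, *remote;

	while ((ep = of_graph_get_next_endpoint(root, ep)) != NULL) {
		remote = of_graph_get_remote_port_parent(ep);
		if (!remote)
			continue;
		pr_info("link: %s -> %s\n", root->full_name,
			remote->full_name);
		of_node_put(remote);
	}
}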
On the other hand, where would this information appear in the graph? As I understood, on card creation, the widgets and routes, which appear at the card level, redefine the CPU and CODEC DAI definitions.
What do you mean by "redefine the CPU and CODEC DAI definitions"?
With a DT graph, each CPU/CODEC would know exactly the widgets and routes it has to define.
Which widgets/routes do you mean?
Well, forget about this. I never clearly understood why some widgets and routes had to be defined at card level.
Your example is a relatively simple one where you do not have any additional audio fabric on the board itself.
Right, and that's why I'd be glad to quickly have something in the kernel. More properties could be added later as requests come in.
I'd agree if this was some kind of kernel internal stuff, but this is creating ABI and we have to maintain it forever. Rushing this in without proper discussion and consideration of the more complex use-cases is in my opinion not a good idea.
Using a graph of ports to describe the audio subsystem has been pushed forward by many people for a long time, as shown by the creation of the document Documentation/devicetree/bindings/graph.txt.
Anyway, I am also glad to have this discussion.
On Fri, Jan 23, 2015 at 07:34:56PM +0100, Jean-Francois Moine wrote:
A card builder is a device which
- scans the graph of ports,
- fills the struct snd_soc_card according to the links between the ports and their properties,
- and, eventually, calls snd_soc_register_card().
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. Its creation is done by an audio controller.
Which audio controller? There may be several CPU side audio interfaces in the same card. For example people often want to have both low latency and high latency audio paths from the CPU into the hardware (low latency tends to increase power burn). SoC centric system designs do sometimes also have PDM I/O, expecting to be directly connected to DMICs and so on, which results in a relatively large number of CPU interfaces.
With a DT graph, each CPU/CODEC would know exactly the widgets and routes it has to define.
Which widgets/routes do you mean?
Well, forget about this. I never clearly understood why some widgets and routes had to be defined at card level.
Please do try to understand the idea of representing simple components on the board and analogue interconnects between devices - it's really important and not something that can be neglected.
I'd agree if this was some kind of kernel internal stuff, but this is creating ABI and we have to maintain it forever. Rushing this in without proper discussion and consideration of the more complex use-cases is in my opinion not a good idea.
Using a graph of ports to describe the audio subsystem has been pushed forward by many people for a long time, as shown by the creation of the document Documentation/devicetree/bindings/graph.txt.
That DT binding was done entirely in the context of video applications IIRC, this is the first time it's been discussed in this context.
On Fri, 23 Jan 2015 19:13:43 +0000 Mark Brown broonie@kernel.org wrote:
On Fri, Jan 23, 2015 at 07:34:56PM +0100, Jean-Francois Moine wrote:
A card builder is a device which
- scans the graph of ports,
- fills the struct snd_soc_card according to the links between the ports and their properties,
- and, eventually, calls snd_soc_register_card().
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. Its creation is done by an audio controller.
Which audio controller? There may be several CPU side audio interfaces in the same card. For example people often want to have both low latency and high latency audio paths from the CPU into the hardware (low latency tends to increase power burn). SoC centric system designs do sometimes also have PDM I/O, expecting to be directly connected to DMICs and so on, which results in a relatively large number of CPU interfaces.
The audio controller which creates the card depends on the complexity of the card. When there are many controllers, it is up to the designer either to define a master audio controller or to instantiate a 'card' device via the DT to do the job.
With a DT graph, each CPU/CODEC would know exactly the widgets and routes it has to define.
Which widgets/routes do you mean?
Well, forget about this. I never clearly understood why some widgets and routes had to be defined at card level.
Please do try to understand the idea of representing simple components on the board and analogue interconnects between devices - it's really important and not something that can be neglected.
The problem is that this understanding would stay abstract: I have no such hardware. Anyway, if the representation can be done with the simple-card, it may also be done with a graph of ports.
I'd agree if this was some kind of kernel internal stuff, but this is creating ABI and we have to maintain it forever. Rushing this in without proper discussion and consideration of the more complex use-cases is in my opinion not a good idea.
Using a graph of ports to describe the audio subsystem has been pushed forward by many people for a long time, as shown by the creation of the document Documentation/devicetree/bindings/graph.txt.
That DT binding was done entirely in the context of video applications IIRC, this is the first time it's been discussed in this context.
http://mailman.alsa-project.org/pipermail/alsa-devel/2014-January/070622.htm... http://mailman.alsa-project.org/pipermail/alsa-devel/2015-January/086273.htm...
On Sat, Jan 24, 2015 at 08:30:27AM +0100, Jean-Francois Moine wrote:
Mark Brown broonie@kernel.org wrote:
On Fri, Jan 23, 2015 at 07:34:56PM +0100, Jean-Francois Moine wrote:
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. Its creation is done by an audio controller.
Which audio controller? There may be several CPU side audio interfaces in the same card. For example people often want to have both low latency and high latency audio paths from the CPU into the hardware (low latency tends to increase power burn). SoC centric system designs do sometimes also have PDM I/O, expecting to be directly connected to DMICs and so on, which results in a relatively large number of CPU interfaces.
The audio controller which creates the card depends on the complexity of the card. When there are many controllers, it is up to the designer either to define a master audio controller or to instantiate a 'card' device via the DT to do the job.
So how does the simple controller interact with a more complex one given that it's somehow picking some controller node to start from?
Well, forget about this. I never clearly understood why some widgets and routes had to be defined at card level.
Please do try to understand the idea of representing simple components on the board and analogue interconnects between devices - it's really important and not something that can be neglected.
The problem is that this understanding would stay abstract: I have no such hardware. Anyway, if the representation can be done with the simple-card, it may also be done with a graph of ports.
If you have a device with any sort of speaker or microphone, or any sort of external connector for interfacing with an external device like a headphone jack, then you have something that could be a widget.
That DT binding was done entirely in the context of video applications IIRC, this is the first time it's been discussed in this context.
http://mailman.alsa-project.org/pipermail/alsa-devel/2014-January/070622.htm... http://mailman.alsa-project.org/pipermail/alsa-devel/2015-January/086273.htm...
So there's been some in passing mentions, not really serious discussion though...
On Tue, 3 Feb 2015 16:47:48 +0000 Mark Brown broonie@kernel.org wrote:
On Sat, Jan 24, 2015 at 08:30:27AM +0100, Jean-Francois Moine wrote:
Mark Brown broonie@kernel.org wrote:
On Fri, Jan 23, 2015 at 07:34:56PM +0100, Jean-Francois Moine wrote:
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. Its creation is done by an audio controller.
Which audio controller? There may be several CPU side audio interfaces in the same card. For example people often want to have both low latency and high latency audio paths from the CPU into the hardware (low latency tends to increase power burn). SoC centric system designs do sometimes also have PDM I/O, expecting to be directly connected to DMICs and so on, which results in a relatively large number of CPU interfaces.
The audio controller which creates the card depends on the complexity of the card. When there are many controllers, it is up to the designer either to define a master audio controller or to instantiate a 'card' device via the DT to do the job.
So how does the simple controller interact with a more complex one given that it's somehow picking some controller node to start from?
A way to solve this problem could be to create only one card builder. This creation could be explicit (done by the first active audio controller) or implicit, done by the audio subsystem on the first controller or CODEC creation.
Then, the card builder could scan the whole DT looking for audio ports and create one or more cards according to the graph connectivity.
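A minimal sketch of what such a whole-tree scan could look like, keying on the "port-type" property of the proposed binding (the function name is illustrative):

#include <linux/of.h>
#include <linux/printk.h>

/* find every audio port in the tree, as a starting point for grouping
 * the connected ports into one or more cards
 */
static void example_scan_audio_ports(void)
{
	struct device_node *np;

	for_each_node_with_property(np, "port-type")
		pr_info("audio port: %s\n", np->full_name);
}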
Well, forget about this. I never clearly understood why some widgets and routes had to be defined at card level.
Please do try to understand the idea of representing simple components on the board and analogue interconnects between devices - it's really important and not something that can be neglected.
The problem is that this understanding would stay abstract: I have no such hardware. Anyway, if the representation can be done with the simple-card, it may also be done with a graph of ports.
If you have a device with any sort of speaker or microphone, or any sort of external connector for interfacing with an external device like a headphone jack, then you have something that could be a widget.
I know what the widgets and routes are; I was just wondering why they (especially the widgets) need to appear at the card level instead of just being declared in the DAIs (from the platform or the DT). And the same question may also be raised about the audio formats, clocks, TDMs...
That DT binding was done entirely in the context of video applications IIRC, this is the first time it's been discussed in this context.
http://mailman.alsa-project.org/pipermail/alsa-devel/2014-January/070622.htm... http://mailman.alsa-project.org/pipermail/alsa-devel/2015-January/086273.htm...
So there's been some in passing mentions, not really serious discussion though...
I may come back to the card builder later, but Russell's idea of declaring the tda998x audio parameters in a port, as described in a graph of ports, seems fine to me. This declaration should be compatible with the use of the simple-card.
On Tue, Feb 03, 2015 at 08:31:30PM +0100, Jean-Francois Moine wrote:
Mark Brown broonie@kernel.org wrote:
So how does the simple controller interact with a more complex one given that it's somehow picking some controller node to start from?
A way to solve this problem could be to create only one card builder. This creation could be explicit (done by the first active audio controller) or implicit, done by the audio subsystem on the first controller or CODEC creation.
Then, the card builder could scan the whole DT looking for audio ports and create one or more cards according to the graph connectivity.
How is this going to work with dynamically instantiated hardware like DT overlays?
If you have a device with any sort of speaker or microphone, or any sort of external connector for interfacing with an external device like a headphone jack, then you have something that could be a widget.
I know what the widgets and routes are; I was just wondering why they (especially the widgets) need to appear at the card level instead of just being declared in the DAIs (from the platform or the DT).
As previously and repeatedly discussed DAIs have no special place in a general audio system and we can't base the entire system off them. Which DAI should have the headphone jack connected to the analogue only headphone driver in my system (there may not even be a way to route digital audio to it)? How does this work for off-SoC audio hubs where there is a device with multiple DAIs connected to both one or more other digital devices and the analogue?
Please go and research this if you're intending to work on generic bindings, it gets extremely repetitive to have to go over this again and again. We already have simple-card to provide a binding for trivial systems and don't want to end up with a never ending series of slightly more complicated bindings which each cover slightly different sets of systems in ways that users struggle to differentiate between.
And the same question may also be raised about the audio formats, clocks, TDMs...
Similar things here - which of the two or more devices on a digital audio link (yes, they're buses not point to point links) has the configuration and how do we stitch them together? How do we figure out when and how to do runtime reconfiguration of the clock tree (which is needed by some systems)?
Again, please do some research on this. If you are trying to define generic device tree bindings it is really important that you understand what you are trying to model with those bindings.
On 01/23/2015 07:34 PM, Jean-Francois Moine wrote:
On Fri, 23 Jan 2015 14:56:04 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
On 01/23/2015 01:15 PM, Jean-Francois Moine wrote: [...]
The DT should describe the hardware, and the simple-card mixes hardware and software. For example, the kirkwood controller may create 2 CPU DAIs. With the simple-card, the DT contains a number to reference these DAIs (for example, implicitly, <audio1 0> references the I2S output). So, what if the controller creates only one DAI, or what if the FreeBSD/OpenBSD/.. driver does not set the same references to these DAIs? The graph of ports fixes this problem.
Even with the simple-card bindings there are no software specific bits. The DAI that is referenced in your example is the physical DAI as it exists in the hardware. Which DAI maps to which specifier is defined in the devicetree bindings definition for the hardware unit.
There is no first or second output in the kirkwood controller; there are I2S and S/PDIF outputs. Their hardware controls are done by bits in the different registers. Then, the driver may define its DAIs in any order and assign any DAI ID to these DAIs. So, for a given DAI, there is no relation between its type (I2S or S/PDIF), its index in the DAI driver (= ASoC component index) and its ID.
In the simple-card DT, the CPU DAI (as the CODEC DAI) is referenced by a phandle (the audio-controller) and a number which should identify the DAI. This number is translated to a DAI name thanks to the function snd_soc_get_dai_name(). This function scans the components and returns the name of the DAI (component) supported by the audio controller with the searched number equal to the component index (there is no 'xlate' function).
So, with the simple-card, the CPU or CODEC DAI binding (phandle + number) identifies what the software has put in its table, but not a hardware entity.
Are you talking about the kirkwood driver or about things in general?
What you are saying is not true in general; if it is true for the kirkwood driver, then the driver is broken with regard to DT bindings.
The specifier that follows after the phandle is hardware specific. This can be any number of integers. And the meaning of this is defined in the bindings specification for that hardware.
Many specifications define a binding in which either no specifier needs to be supplied, since there is only a single DAI, or one integer specifier which denotes which DAI is referenced for a multi-DAI controller. In the latter case the bindings specification has to state which number maps to which physical DAI. Out of convenience many drivers use the same order as given in the bindings specification for their ASoC DAIs. Which is why the ASoC core provides a standard implementation for translating phandle + specifier to an ASoC DAI. But it is important to remember that the authoritative entity here is the bindings specification and not the driver.
If the bindings specification uses a different scheme for referencing the DAIs, a custom xlate function that conforms to this specification needs to be provided by the driver.
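For example, a minimal sketch of such a driver-specific translation for a two-DAI controller whose binding says 0 = I2S and 1 = S/PDIF; the callback would be hooked up through the of_xlate_dai_name member of the component driver (assumed from the ASoC core of this period), and the DAI names are illustrative:

#include <linux/errno.h>
#include <linux/of.h>
#include <sound/soc.h>

/* .of_xlate_dai_name callback of the snd_soc_component_driver */
static int example_of_xlate_dai_name(struct snd_soc_component *component,
				     struct of_phandle_args *args,
				     const char **dai_name)
{
	if (args->args_count != 1 || args->args[0] > 1)
		return -EINVAL;

	*dai_name = args->args[0] == 0 ? "example-i2s" : "example-spdif";
	return 0;
}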
More: a simple audio card may easily be created from a graph of ports as the simple-card does, but by the audio-controller (sorry, I also forgot the kirkwood patch for this in my previous patch request). In case of complex cards, the links and properties of this graph may also be used by board specific card devices.
One issue is how to deal with multi-point-to-multi-point links. I2S/TDM is a bus and can have more than one reader/writer.
The second issue is how to describe the clock and frame master relationships. Multiple different buses can share the same clock and frame generator. E.g. typically the capture and playback stream are linked in this way.
The ports and endpoints may contain properties to describe these configurations. Complex cases should be handled by specific card builders.
Could you describe in detail what a card builder is and how to decide when and how a card builder is executed?
A card builder is a device which
- scans the graph of ports,
- fills the struct snd_soc_card according to the links between the ports and their properties,
- and, eventually, calls snd_soc_register_card().
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. Its creation is done by an audio controller.
For complex cards, a card builder would be the same as an actual card driver, but with the capability to know about the board thanks to the graph of ports. Its creation could be done by a compatible device in the DT (as is done today) or by the unique or a 'master' audio controller.
A card builder is subject to a PROBE_DEFER after calling snd_soc_register_card(), so, when it is created by an audio controller, the latter should already have created its own resources.
An audio controller does the following:
- initialize the hardware and software,
- create the DAIs from:
- the graph of ports or
- default values
- register these DAIs and
- if the DAIs were created by a graph of ports, create the card builder.
I don't understand how invoking the card builder from the CPU side audio controller driver will work. The audio controller driver does not know what card it is part of, so how does it decide which builder to call, and what happens in the case of multiple CPU side audio controllers involved in the same card. In rare cases there might not even be a CPU side audio controller involved at all.
I think this is something that needs to be done in the ASoC/ALSA core itself. Create the graph, wait until all endpoints of the graph have been registered and then create the card. Or something similar.
- Lars
On Sat, 24 Jan 2015 12:27:35 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
On 01/23/2015 07:34 PM, Jean-Francois Moine wrote:
On Fri, 23 Jan 2015 14:56:04 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
On 01/23/2015 01:15 PM, Jean-Francois Moine wrote:
There is no first or second output in the kirkwood controller; there are I2S and S/PDIF outputs. Their hardware controls are done by bits in the different registers. Then, the driver may define its DAIs in any order and assign any DAI ID to these DAIs. So, for a given DAI, there is no relation between its type (I2S or S/PDIF), its index in the DAI driver (= ASoC component index) and its ID.
In the simple-card DT, the CPU DAI (as the CODEC DAI) is referenced by a phandle (the audio-controller) and a number which should identify the DAI. This number is translated to a DAI name thanks to the function snd_soc_get_dai_name(). This function scans the components and returns the name of the DAI (component) supported by the audio controller with the searched number equal to the component index (there is no 'xlate' function).
So, with the simple-card, the CPU or CODEC DAI binding (phandle + number) identifies what the software has put in its table, but not a hardware entity.
Are you talking about the kirkwood driver or about things in general?
I was talking about the kirkwood driver for the I2S and S/PDIF numbers. Other drivers may have the same problems.
What you are saying is not true in general; if it is true for the kirkwood driver, then the driver is broken with regard to DT bindings.
The specifier that follows after the phandle is hardware specific. This can be any number of integers. And the meaning of this is defined in the bindings specification for that hardware.
Many specifications define a binding in which either no specifier needs to be supplied, since there is only a single DAI, or one integer specifier which denotes which DAI is referenced for a multi-DAI controller. In the latter case the bindings specification has to state which number maps to which physical DAI. Out of convenience many drivers use the same order as given in the bindings specification for their ASoC DAIs. Which is why the ASoC core provides a standard implementation for translating phandle + specifier to an ASoC DAI. But it is important to remember that the authoritative entity here is the bindings specification and not the driver.
If the bindings specification uses a different scheme for referencing the DAIs, a custom xlate function that conforms to this specification needs to be provided by the driver.
I did not have a look at any other multi-DAI audio CPU/CODEC, so I may be wrong.
All I can say is:
- the driver is responsible to define its DAIs,
- it may create them in any order,
- the index of these DAIs is used in the DT.
Then, for you, the DT binding should clearly indicate the relation between the specifier that follows the phandle and the hardware DAI.
For the kirkwood audio controller, with a DT as:
audio-ports = "spdif", "i2s";
the DT documentation should say:
- option 1: "the DAI specifier is always '0' for I2S and '1' for S/PDIF"
- option 2: "the DAI specifier is the index of the port in the port declaration"
The actual problem is that there is no port definition in the DT for the kirkwood audio controller.
On the other hand, referencing the DAI by an endpoint nullifies the DAI specifier problem: it no longer exists!
Could you describe in detail what a card builder is and how to decide when and how a card builder is executed?
A card builder is a device which
- scans the graph of ports,
- fills the struct snd_soc_card according to the links between the ports and their properties,
- and, eventually, calls snd_soc_register_card().
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. Its creation is done by an audio controller.
For complex cards, a card builder would be the same as an actual card driver, but with the capability to know about the board thanks to the graph of ports. Its creation could be done by a compatible device in the DT (as is done today) or by the unique or a 'master' audio controller.
A card builder is subject to a PROBE_DEFER after calling snd_soc_register_card(), so, when it is created by an audio controller, the latter should already have created its own resources.
An audio controller does the following:
- initialize the hardware and software,
- create the DAIs from:
- the graph of ports or
- default values
- register these DAIs and
- if the DAIs were created by a graph of ports, create the card builder.
I don't understand how invoking the card builder from the CPU side audio controller driver will work. The audio controller driver does not know what card it is part of, so how does it decide which builder to call, and what happens in the case of multiple CPU side audio controllers involved in the same card. In rare cases there might not even be a CPU side audio controller involved at all.
Each entity referenced in the graph knows every other one, and the simple dt-card builder works only for one audio controller and one CODEC level (each audio link in the graph has the controller as an endpoint).

For more complex cards, some DT information must exist. It can be:

- a board-specific sound node:

	sound {
		compatible = "myboard,audio-card";
		... card properties ...
	};

- a device-specific sound node:

	sound {
		compatible = "mydevice,audio-card";
		audio-root = <&audio1>;		/* starting point of the graph */
		... card properties ...
	};

- a generic sound node in the case of multiple controllers or multiple CODEC levels (after a dt-card extension):

	sound {
		compatible = "linux,dt-card";
		audio-root = <&audio1>;		/* starting point of the graph */
		... card properties ...
	};

For the last case, the creation of the simple dt-card builder could be done by a node in the controller, avoiding the DT having to know about this piece of software:

	&audio1 {
		...
		audio-card {
			... card properties ...
		};
		port@0 {
			...
		};
		...
	};
I think this is something that needs to be done in the ASoC/ALSA core itself. Create the graph, wait until all endpoints of the graph have been registered and then create the card. Or something similar.
To go further, such a function could fully replace snd_soc_register_card()!
On 01/24/2015 02:18 PM, Jean-Francois Moine wrote:
On Sat, 24 Jan 2015 12:27:35 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
On 01/23/2015 07:34 PM, Jean-Francois Moine wrote:
On Fri, 23 Jan 2015 14:56:04 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
On 01/23/2015 01:15 PM, Jean-Francois Moine wrote:
There is no first or second output in the kirkwood controller; there are I2S and S/PDIF outputs. Their hardware controls are done by bits in the different registers. Then, the driver may define its DAIs in any order and assign any DAI ID to these DAIs. So, for a given DAI, there is no relation between its type (I2S or S/PDIF), its index in the DAI driver (= ASoC component index) and its ID.
In the simple-card DT, the CPU DAI (as the CODEC DAI) is referenced by a phandle (the audio-controller) and a number which should identify the DAI. This number is translated to a DAI name thanks to the function snd_soc_get_dai_name(). This function scans the components and returns the name of the DAI (component) supported by the audio controller with the searched number equal to the component index (there is no 'xlate' function).
So, with the simple-card, the CPU or CODEC DAI binding (phandle + number) identifies what the software has put in its table, but not a hardware entity.
Are you talking about the kirkwood driver or about things in general?
I was talking about the kirkwood driver for the I2S and S/PDIF numbers. Other drivers may have the same problems.
What you are saying is not true in general; if it is true for the kirkwood driver, then the driver is broken with regard to DT bindings.
The specifier that follows after the phandle is hardware specific. This can be any number of integers. And the meaning of this is defined in the bindings specification for that hardware.
Many specifications define a binding in which either no specifier needs to be supplied, since there is only a single DAI, or one integer specifier which denotes which DAI is referenced for a multi-DAI controller. In the latter case the bindings specification has to state which number maps to which physical DAI. Out of convenience many drivers use the same order as given in the bindings specification for their ASoC DAIs. Which is why the ASoC core provides a standard implementation for translating phandle + specifier to an ASoC DAI. But it is important to remember that the authoritative entity here is the bindings specification and not the driver.
If the bindings specification uses a different scheme for referencing the DAIs, a custom xlate function that conforms to this specification needs to be provided by the driver.
I did not have a look at any other multi-DAI audio CPU/CODEC, so I may be wrong.
All I can say is:
- the driver is responsible to define its DAIs,
- it may create them in any order,
- the index of these DAIs is used in the DT.
The index in the DT needs to match the index that is in the DT specification. This does not necessarily have to match the ASoC driver ID. But since it makes things easier for the driver a lot of drivers opt for using the same IDs as specified in the DT specification.
Then, for you, the DT binding should clearly indicate the relation between the specifier that follows the phandle and the hardware DAI.
For the kirkwood audio controller, with a DT as:
audio-ports = "spdif", "i2s";
the DT documentation should say:
option 1: "the DAI specifier is always '0' for I2S and '1' for S/PDIF"
option 2: "the DAI specifier is the index of the port in the port declaration"
The actual problem is that there is no port definition in the DT for the kirkwood audio controller.
On the other hand, referencing the DAI by an endpoint removes the DAI specifier problem altogether: the specifier simply no longer exists!
Could you describe in detail what a card builder is and how to decide when and how a card builder is executed?
A card builder is a device which
- scans the graph of ports,
- fills the struct snd_soc_card according to the links between the ports and their properties,
- and, finally, calls snd_soc_register_card() (a condensed sketch follows this list).
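A heavily condensed sketch of that flow; the helper example_parse_graph() and the assumption that the builder inherits the controller's of_node are illustrative, not the actual dt-card.c code from patch 3.

#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <sound/soc.h>

/* hypothetical: walks the port graph and fills card->dai_link[] */
int example_parse_graph(struct device *dev, struct device_node *controller,
			struct snd_soc_card *card);

static int example_card_builder_probe(struct platform_device *pdev)
{
	struct snd_soc_card *card;
	int ret;

	card = devm_kzalloc(&pdev->dev, sizeof(*card), GFP_KERNEL);
	if (!card)
		return -ENOMEM;
	card->dev = &pdev->dev;
	card->owner = THIS_MODULE;

	/* scan the graph, the audio controller node being the root */
	ret = example_parse_graph(&pdev->dev, pdev->dev.of_node, card);
	if (ret < 0)
		return ret;

	/* may return -EPROBE_DEFER until all CODECs have probed */
	return devm_snd_soc_register_card(&pdev->dev, card);
}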
The simple card builder, 'dt-card' (maybe a better name would have been 'graph-card'), acts just like the simple-card except that it does not appear in the DT. It is created by an audio controller.
For complex cards, a card builder would be the same as a real card driver, but with the capability to learn about the board from the graph of ports. It could be created by a compatible device in the DT (as is done currently) or by the unique, or a 'master', audio controller.
A card builder may get an -EPROBE_DEFER from snd_soc_register_card(), so, when it is created by an audio controller, the latter should already have created its own resources.
An audio controller does the following:
- initialize the hardware and software,
- create the DAIs from:
- the graph of ports or
- default values
- register these DAIs and
- if the DAIs were created from a graph of ports, create the card builder (a hedged sketch follows this list).
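On the controller side, the sequence could look like the sketch below; the "asoc-dt-card" device name and the example_* symbols are assumptions for illustration, not necessarily what patch 3 implements.

#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/platform_device.h>
#include <sound/soc.h>

/* hypothetical: DAIs and component driver built from the graph or defaults */
extern struct snd_soc_dai_driver example_dais[2];
extern const struct snd_soc_component_driver example_ctrl_component;

static int example_controller_probe(struct platform_device *pdev)
{
	struct platform_device *builder;
	int ret;

	/* 1. hardware and software initialization omitted */

	/* 2./3. register the DAIs created from the graph (or defaults) */
	ret = devm_snd_soc_register_component(&pdev->dev,
					      &example_ctrl_component,
					      example_dais,
					      ARRAY_SIZE(example_dais));
	if (ret < 0)
		return ret;

	/* 4. spawn the card builder; its own probe may then defer */
	builder = platform_device_register_data(&pdev->dev, "asoc-dt-card",
						PLATFORM_DEVID_AUTO, NULL, 0);
	return PTR_ERR_OR_ZERO(builder);
}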
I don't understand how invoking the card builder from the CPU-side audio controller driver will work. The audio controller driver does not know which card it is part of, so how does it decide which builder to call, and what happens when multiple CPU-side audio controllers are involved in the same card? In rare cases there might not even be a CPU-side audio controller involved at all.
Each entity referenced in the graph knows every other one, and the simple dt-card builder works only for a single audio controller and a single CODEC level (each audio link in the graph has the controller as one endpoint).
For more complex cards, some information must exist in the DT. It can be:
a board specific sound node:
sound { compatible = "myboard,audio-card"; ... card properties ... };
a device specific sound node:
sound { compatible = "mydevice,audio-card"; audio-root = <&audio1>; /* starting point of the graph */ ... card properties ... };
a generic sound node in the case of multi controllers or multi codec levels (after dt-card extension):
sound { compatible = "linux,dt-card"; audio-root = <&audio1>; /* starting point of the graph */ ... card properties ... };
For the last case, the simple dt-card builder could be instantiated from a node inside the controller, so that the DT does not need to know about this piece of software:
&audio1 { ... audio-card { ... card properties ... }; port@0 { ... }; ... };
Is there any advantage to putting the card node inside the controller node rather than having it as a separate node?
I think this is something that needs to be done in the ASoC/ALSA core itself. Create the graph, wait until all endpoints of the graph have been registered and then create the card. Or something similar.
To go further, such a function could fully replace snd_soc_register_card()!
Yes, if the graph is strongly connected (which it should be) the framework will be able to identify when all components that belong to the graph have been registered and is then able to create a card for it.
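A rough sketch of such a completeness test in the core, assuming a helper snd_soc_node_is_registered() (hypothetical) that tells whether a component for a given device node has been registered; a full implementation would walk the whole graph, not just the links hanging off one root node.

#include <linux/of.h>
#include <linux/of_graph.h>
#include <sound/soc.h>

/* hypothetical: true once a component for this device node is registered */
bool snd_soc_node_is_registered(struct device_node *np);

/*
 * Check that the remote side of every link of the graph root has
 * already been registered; only then build and register the card.
 */
static bool example_graph_complete(struct device_node *root)
{
	struct device_node *ep = NULL, *remote;

	while ((ep = of_graph_get_next_endpoint(root, ep)) != NULL) {
		remote = of_graph_get_remote_port_parent(ep);
		if (!remote || !snd_soc_node_is_registered(remote)) {
			of_node_put(remote);
			of_node_put(ep);
			return false;
		}
		of_node_put(remote);
	}
	return true;
}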
Are you by chance at FOSDEM? If you are maybe we can sit down for a moment and discuss things.
- Lars
On Mon, 26 Jan 2015 12:53:53 +0100 Lars-Peter Clausen lars@metafoo.de wrote:
a generic sound node in the case of multi controllers or multi codec levels (after dt-card extension):
sound { compatible = "linux,dt-card"; audio-root = <&audio1>; /* starting point of the graph */ ... card properties ... };
For the last case, the simple dt-card builder could be instantiated from a node inside the controller, so that the DT does not need to know about this piece of software:
&audio1 { ... audio-card { ... card properties ... }; port@0 { ... }; ... };
Is there any advantage to putting the card node inside the controller node rather than having it as a separate node?
There is no advantage, but it seems to me that the sound device is a software entity which should not appear in the devicetree.
I think this is something that needs to be done in the ASoC/ALSA core itself. Create the graph, wait until all endpoints of the graph have been registered and then create the card. Or something similar.
To go further, such a function could fully replace snd_soc_register_card()!
Yes, if the graph is strongly connected (which it should be) the framework will be able to identify when all components that belong to the graph have been registered and is then able to create a card for it.
Russell's "Componentized device handling" would permit to synchronize all components avoiding the PROBE_DEFERs, but there is a problem with the tda998x: this one is a component of both the audio and video subsystems, and the bind() callback does not indicate by which master compoment it is called...
Are you by chance at FOSDEM? If you are maybe we can sit down for a moment and discuss things.
Sorry, I will not be at FOSDEM.
This patch adds a document describing common OF bindings for audio devices.
Signed-off-by: Jean-Francois Moine moinejf@free.fr --- .../devicetree/bindings/sound/audio-interfaces.txt | 101 +++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 Documentation/devicetree/bindings/sound/audio-interfaces.txt
diff --git a/Documentation/devicetree/bindings/sound/audio-interfaces.txt b/Documentation/devicetree/bindings/sound/audio-interfaces.txt new file mode 100644 index 0000000..4eddb3f --- /dev/null +++ b/Documentation/devicetree/bindings/sound/audio-interfaces.txt @@ -0,0 +1,101 @@ +Common bindings for audio device graphs + +The graph of the audio ports follows the common binding for device graphs +defined in Documentation/devicetree/bindings/graph.txt. + +Here are described only the audio specific properties. + +Port required properties: + +- port-type: "i2s" or "spdif" + +Port optional property: + +- reg: numeric value which defines how the port is wired to the device. + This value depends on the device. Usually, it is the content + of the device register which controls the audio pins. + +Port required nodes: + +- at least one 'endpoint' node must be specified. + + +Example: + +The board contains an audio controller with two outputs: +- the S/PDIF output is connected to two devices: + - a S/PDIF optical output + - a HDMI transmitter. +- the I2S output is connected to an other audio input of the HDMI transmitter. + + /* audio controller */ + &audio1 { + status = "okay"; + ... + #address-cells = <1>; + #size-cells = <0>; + + /* S/PDIF output */ + port@0 { + port-type = "spdif"; + audio1_spdif0: endpoint@0 { + remote-endpoint = <&spdif_out>; + }; + audio1_spdif1: endpoint@1 { + remote-endpoint = <&tda998x_spdif>; + }; + }; + + /* I2S output */ + port@1 { + port-type = "i2s"; + audio1_i2s: endpoint { + remote-endpoint = <&tda998x_i2s>; + }; + }; + }; + + /* optical output */ + spdif_codec: spdif-codec { + ... + port { + port-type = "spdif"; + spdif_out: endpoint { + remote-endpoint = <&audio1_spdif0>; + }; + }; + }; + + /* HDMI transmitter */ + hdmi: hdmi-encoder { + ... + #address-cells = <1>; + #size-cells = <0>; + + /* video input */ + port@230145 { + port-type = "rgb"; + reg = <0x230145>; + hdmi_0: endpoint { + remote-endpoint = <&lcd0_0>; + }; + }; + + /* audio input I2S on AP1 */ + port@3 { + port-type = "i2s"; + reg = <0x03>; + tda998x_i2s: endpoint { + remote-endpoint = <&audio1_i2s>; + }; + }; + + /* audio input S/PDIF on AP2 */ + port@4 { + port-type = "spdif"; + reg = <0x04>; + tda998x_spdif: endpoint { + remote-endpoint = <&audio1_spdif1>; + }; + }; + };