summaryrefslogtreecommitdiffstats
path: root/doc/artsbuilder
diff options
context:
space:
mode:
Diffstat (limited to 'doc/artsbuilder')
-rw-r--r--doc/artsbuilder/Makefile.am5
-rw-r--r--doc/artsbuilder/apis.docbook357
-rw-r--r--doc/artsbuilder/arts-structure.pngbin0 -> 8955 bytes
-rw-r--r--doc/artsbuilder/artsbuilder.docbook864
-rw-r--r--doc/artsbuilder/detail.docbook1765
-rw-r--r--doc/artsbuilder/digitalaudio.docbook14
-rw-r--r--doc/artsbuilder/faq.docbook1112
-rw-r--r--doc/artsbuilder/future.docbook414
-rw-r--r--doc/artsbuilder/glossary.docbook164
-rw-r--r--doc/artsbuilder/gui.docbook28
-rw-r--r--doc/artsbuilder/helping.docbook246
-rw-r--r--doc/artsbuilder/images/Doc_MODUL.pngbin0 -> 2377 bytes
-rw-r--r--doc/artsbuilder/images/Gui_AUDIO_MANAGER.pngbin0 -> 313 bytes
-rw-r--r--doc/artsbuilder/images/Gui_INSTRUMENT_MAPPER.pngbin0 -> 306 bytes
-rw-r--r--doc/artsbuilder/images/Gui_LABEL.pngbin0 -> 1771 bytes
-rw-r--r--doc/artsbuilder/images/Gui_MIXER.pngbin0 -> 1729 bytes
-rw-r--r--doc/artsbuilder/images/Gui_PANEL.pngbin0 -> 1706 bytes
-rw-r--r--doc/artsbuilder/images/Gui_POTI.pngbin0 -> 1803 bytes
-rw-r--r--doc/artsbuilder/images/Gui_SLIDER.pngbin0 -> 1934 bytes
-rw-r--r--doc/artsbuilder/images/Gui_SUBPANEL.pngbin0 -> 1839 bytes
-rw-r--r--doc/artsbuilder/images/Gui_WINDOW.pngbin0 -> 1513 bytes
-rw-r--r--doc/artsbuilder/images/Interface_MIDI_NOTE.pngbin0 -> 1018 bytes
-rw-r--r--doc/artsbuilder/images/Makefile.am4
-rw-r--r--doc/artsbuilder/images/Synth_ADD.pngbin0 -> 1129 bytes
-rw-r--r--doc/artsbuilder/images/Synth_ATAN_SATURATE.pngbin0 -> 1266 bytes
-rw-r--r--doc/artsbuilder/images/Synth_BUS_DOWNLINK.pngbin0 -> 1577 bytes
-rw-r--r--doc/artsbuilder/images/Synth_BUS_UPLINK.pngbin0 -> 1503 bytes
-rw-r--r--doc/artsbuilder/images/Synth_CDELAY.pngbin0 -> 1011 bytes
-rw-r--r--doc/artsbuilder/images/Synth_COMPRESSOR.pngbin0 -> 680 bytes
-rw-r--r--doc/artsbuilder/images/Synth_DEBUG.pngbin0 -> 1271 bytes
-rw-r--r--doc/artsbuilder/images/Synth_DELAY.pngbin0 -> 538 bytes
-rw-r--r--doc/artsbuilder/images/Synth_DIV.pngbin0 -> 2851 bytes
-rw-r--r--doc/artsbuilder/images/Synth_ENVELOPE_ADSR.pngbin0 -> 1514 bytes
-rw-r--r--doc/artsbuilder/images/Synth_FILEPLAY.pngbin0 -> 1384 bytes
-rw-r--r--doc/artsbuilder/images/Synth_FM_SOURCE.pngbin0 -> 1033 bytes
-rw-r--r--doc/artsbuilder/images/Synth_FREQUENCY.pngbin0 -> 954 bytes
-rw-r--r--doc/artsbuilder/images/Synth_MIDI_DEBUG.pngbin0 -> 1349 bytes
-rw-r--r--doc/artsbuilder/images/Synth_MIDI_ROUTER.pngbin0 -> 1058 bytes
-rw-r--r--doc/artsbuilder/images/Synth_MUL.pngbin0 -> 1237 bytes
-rw-r--r--doc/artsbuilder/images/Synth_NIL.pngbin0 -> 812 bytes
-rw-r--r--doc/artsbuilder/images/Synth_PLAY.pngbin0 -> 1467 bytes
-rw-r--r--doc/artsbuilder/images/Synth_PLAY_AKAI.pngbin0 -> 459 bytes
-rw-r--r--doc/artsbuilder/images/Synth_PLAY_AKAIS.pngbin0 -> 577 bytes
-rw-r--r--doc/artsbuilder/images/Synth_PLAY_WAV.pngbin0 -> 678 bytes
-rw-r--r--doc/artsbuilder/images/Synth_PSCALE.pngbin0 -> 1479 bytes
-rw-r--r--doc/artsbuilder/images/Synth_RC.pngbin0 -> 1000 bytes
-rw-r--r--doc/artsbuilder/images/Synth_SEQUENCE.pngbin0 -> 1511 bytes
-rw-r--r--doc/artsbuilder/images/Synth_SEQUENCE_FREQ.pngbin0 -> 2194 bytes
-rw-r--r--doc/artsbuilder/images/Synth_SHELVE_CUTOFF.pngbin0 -> 1005 bytes
-rw-r--r--doc/artsbuilder/images/Synth_STD_EQUALIZER.pngbin0 -> 598 bytes
-rw-r--r--doc/artsbuilder/images/Synth_STRUCT_KILL.pngbin0 -> 874 bytes
-rw-r--r--doc/artsbuilder/images/Synth_WAVE_SIN.pngbin0 -> 1199 bytes
-rw-r--r--doc/artsbuilder/images/Synth_WAVE_SQUARE.pngbin0 -> 1446 bytes
-rw-r--r--doc/artsbuilder/images/Synth_WAVE_TRI.pngbin0 -> 1194 bytes
-rw-r--r--doc/artsbuilder/images/Synth_XFADE.pngbin0 -> 1423 bytes
-rw-r--r--doc/artsbuilder/images/schema1.pngbin0 -> 1129 bytes
-rw-r--r--doc/artsbuilder/images/schema2.pngbin0 -> 1373 bytes
-rw-r--r--doc/artsbuilder/images/schema3.pngbin0 -> 976 bytes
-rw-r--r--doc/artsbuilder/images/schema4.pngbin0 -> 881 bytes
-rw-r--r--doc/artsbuilder/index.docbook393
-rw-r--r--doc/artsbuilder/mcop.docbook2274
-rw-r--r--doc/artsbuilder/midi.docbook474
-rw-r--r--doc/artsbuilder/midiintro.docbook14
-rw-r--r--doc/artsbuilder/modules.docbook1336
-rw-r--r--doc/artsbuilder/porting.docbook64
-rw-r--r--doc/artsbuilder/references.docbook56
-rw-r--r--doc/artsbuilder/tools.docbook735
67 files changed, 10319 insertions, 0 deletions
diff --git a/doc/artsbuilder/Makefile.am b/doc/artsbuilder/Makefile.am
new file mode 100644
index 00000000..c0ba5446
--- /dev/null
+++ b/doc/artsbuilder/Makefile.am
@@ -0,0 +1,5 @@
+
+SUBDIRS = $(AUTODIRS)
+KDE_LANG = en
+KDE_DOCS = AUTO
+
diff --git a/doc/artsbuilder/apis.docbook b/doc/artsbuilder/apis.docbook
new file mode 100644
index 00000000..65de23be
--- /dev/null
+++ b/doc/artsbuilder/apis.docbook
@@ -0,0 +1,357 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="arts-apis">
+<title>&arts; Application Programming Interfaces</title>
+
+<sect1 id="api-overview">
+<title>Overview</title>
+<para>
+aRts is not only a piece of software, it also provides a variety of APIs
+for a variety of purposes. In this section, I will try to describe the "big
+picture", a brief glance what those APIs are supposed to do, and how they
+interact.
+</para>
+
+<para>
+There is one important distinction to make: most of the APIs are <emphasis>
+language and location independent</emphasis> because they are specified as
+<emphasis>mcopidl</emphasis>.
+That is, you can basically use the services they offer from any language,
+implement them in any language, and you will not have to care whether you
+are talking to local or remote objects. Here is a list of these first:
+</para>
+
+
+<variablelist>
+<varlistentry>
+<term>core.idl</term>
+ <listitem><para>
+ Basic definitions that form the core of the MCOP functionality, such as
+ the protocol itself, definitions of the object, the trader, the flow
+ system and so on.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>artsflow.idl</term>
+
+ <listitem><para>
+ These contain the flow system you will use for connecting audio streams, the
+ definition of <emphasis>Arts::SynthModule</emphasis> which is the base for
+ any interface that has streams, and finally a few useful audio objects
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>kmedia2.idl</term>
+
+
+ <listitem><para>
+ Here, an object that can play a media, <emphasis>Arts::PlayObject</emphasis>
+ gets defined. Media players such as the KDE media player noatun will be able
+ to play any media for which a PlayObject can be found. So it makes sense to
+ implement PlayObjects for various formats (such as mp3, mpg video, midi, wav,
+ ...) on that base, and there are a lot already.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>soundserver.idl</term>
+
+ <listitem><para>
+ Here, an interface for the system wide sound server artsd is defined. The
+ interface is called <emphasis>Arts::SoundServer</emphasis>, which implements
+ functionality like accepting streams from the network, playing samples,
+ creating custom other aRts objects and so on. Network transparency is
+ implied due to the use of MCOP (as for everything else here).
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>artsbuilder.idl</term>
+ <listitem><para>
+ This module defines basic flow graph functionality, that is, combining
+ simpler objects to more complex ones, by defining a graph of them. It defines
+ the basic interface <emphasis>Arts::StructureDesc</emphasis>,
+ <emphasis>Arts::ModuleDesc</emphasis> and <emphasis>Arts::PortDesc</emphasis>
+ which contain a description of a structure, module, and port. There is also
+ a way to get a "living network of objects" out of these connection and value
+ descriptions, using a factory.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>artsmidi.idl</term>
+
+ <listitem><para>
+ This module defines basic midi functionality, like objects that produce
+ midi events, what is a midi event, an <emphasis>Arts::MidiManager</emphasis>
+ to connect the producers and consumers of midi events, and so on. As always
+ network transparency implied.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>artsmodules.idl</term>
+ <listitem><para>
+ Here are various additional filters, oscillators, effects, delays and
+ so on, everything required for real useful signal processing, and to
+ build complex instruments and effects out of these basic building blocks.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>artsgui.idl</term>
+
+ <listitem><para>
+ This cares about visual objects. It defines the basic type <emphasis>
+ Arts::Widget</emphasis> from which all GUI modules derive. This will produce
+ toolkit independency, and ... visual GUI editing, and serializable GUIs.
+ Also, as the GUI elements have normal attributes, their values can be
+  straightforwardly connected to some signal processing modules. (I.e. the
+ value of a slider to the cutoff of a filter). As always: network transparent.
+ </para></listitem>
+
+</varlistentry>
+
+</variablelist>
+<para>
+Where possible, aRts itself is implemented using IDL. On the other hand, there
+are some <emphasis>language specific</emphasis> APIs, using either plain C++ or
+plain C. It is usually wise to use IDL interfaces where possible, and the
+other APIs where necessary. Here is a list of language specific APIs:
+</para>
+
+<variablelist>
+
+<varlistentry>
+<term>KNotify, KAudioPlayer (included in libkdecore)</term>
+
+ <listitem><para>
+  These are convenience KDE APIs for the simple and common case, where
+ you just want to play a sample. The APIs are plain C++, Qt/KDE optimized,
+ and as easy as it can get.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>libartsc</term>
+ <listitem><para>
+ Plain C interface for the sound server. Very useful for porting legacy
+ applications.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>libmcop</term>
+
+ <listitem><para>
+ Here all magic for MCOP happens. The library contains the basic things you
+ need to know for writing a simple MCOP application, the dispatcher, timers,
+ iomanagement, but also the internals to make the MCOP protocol itself work.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>libartsflow</term>
+ <listitem><para>
+ Besides the implementation of artsflow.idl, some useful utilities like
+ sampling rate conversion.
+ </para></listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>libqiomanager</term>
+
+ <listitem><para>
+ Integration of MCOP into the Qt event loop, when you write Qt applications
+ using MCOP.
+ </para></listitem>
+
+</varlistentry>
+
+</variablelist>
+
+
+
+</sect1>
+<sect1 id="knotify">
+<title>knotify</title>
+<para>
+Not yet written
+</para>
+</sect1>
+
+<sect1 id="kaudioplayer">
+<title>kaudioplayer</title>
+<para>
+Not yet written
+</para>
+</sect1>
+
+<sect1 id="libkmid">
+<title>libkmid</title>
+<para>
+Not yet written
+</para>
+</sect1>
+
+<sect1 id="kmedia2">
+<title>kmedia2</title>
+<para>
+Not yet written
+</para>
+</sect1>
+
+<sect1 id="soundserver">
+<title>sound server</title>
+<para>
+Not yet written
+</para>
+</sect1>
+
+<sect1 id="artsflow">
+<title>artsflow</title>
+<para>
+Not yet written
+</para>
+</sect1>
+
+<sect1 id="capi">
+<title>C <acronym>API</acronym></title>
+
+<sect2 id="capiintro">
+<title>Introduction</title>
+
+<para> The &arts; C <acronym>API</acronym> was designed to make it easy to
+write and port plain C applications to the &arts; sound server. It provides
+streaming functionality (sending sample streams to
+<application>artsd</application>), either blocking or non-blocking. For most
+applications you simply remove the few system calls that deal with your audio
+device and replace them with the appropriate &arts; calls.</para>
+
+<para>I did two ports as a proof of concept: <application>mpg123</application>
+and <application>quake</application>. You can get the patches from <ulink
+url="http://space.twc.de/~stefan/kde/download/artsc-patches.tar.gz">here</ulink>.
+Feel free to submit your own patches to the maintainer of &arts; or of
+multimedia software packages so that they can integrate &arts; support into
+their code.</para>
+
+</sect2>
+
+<sect2 id="capiwalkthru">
+<title>Quick Walkthrough</title>
+
+<para>Sending audio to the sound server with the <acronym>API</acronym> is very
+simple:</para>
+<procedure>
+<step><para>include the header file using <userinput>#include
+&lt;artsc.h&gt;</userinput></para></step>
+<step><para>initialize the <acronym>API</acronym> with
+<function>arts_init()</function></para></step>
+<step><para>create a stream with
+<function>arts_play_stream()</function></para></step>
+<step><para>configure specific parameters with
+<function>arts_stream_set()</function></para></step>
+<step><para>write sampling data to the stream with
+<function>arts_write()</function></para></step>
+<step><para>close the stream with
+<function>arts_close_stream()</function></para></step>
+<step><para>free the <acronym>API</acronym> with
+<function>arts_free()</function></para></step>
+</procedure>
+
+<para>Here is a small example program that illustrates this:</para>
+
+<programlisting>
+#include &lt;stdio.h&gt;
+#include &lt;artsc.h&gt;
+int main()
+{
+ arts_stream_t stream;
+ char buffer[8192];
+ int bytes;
+ int errorcode;
+
+ errorcode = arts_init();
+ if (errorcode &lt; 0)
+ {
+ fprintf(stderr, "arts_init error: %s\n", arts_error_text(errorcode));
+ return 1;
+ }
+
+ stream = arts_play_stream(44100, 16, 2, "artsctest");
+
+ while((bytes = fread(buffer, 1, 8192, stdin)) &gt; 0)
+ {
+ errorcode = arts_write(stream, buffer, bytes);
+ if(errorcode &lt; 0)
+ {
+ fprintf(stderr, "arts_write error: %s\n", arts_error_text(errorcode));
+ return 1;
+ }
+ }
+
+ arts_close_stream(stream);
+ arts_free();
+
+ return 0;
+}
+</programlisting>
+</sect2>
+
+<sect2 id="capiartscconfig">
+<title>Compiling and Linking: <application>artsc-config</application></title>
+
+<para>To easily compile and link programs using the &arts; C
+<acronym>API</acronym>, the <application>artsc-config</application> utility is
+provided which knows which libraries you need to link and where the includes
+are. It is called using</para>
+
+<screen>
+<userinput><command>artsc-config</command> <option>--libs</option></userinput>
+</screen>
+
+<para>to find out the libraries and </para>
+
+<screen>
+<userinput><command>artsc-config</command> <option>--cflags</option></userinput>
+</screen>
+
+<para>to find out additional C compiler flags. The example above could have been
+
+compiled using the command line:</para>
+
+<screen>
+<userinput><command>cc</command> <option>-o artsctest artsctest.c `artsc-config --cflags` `artsc-config --libs`</option></userinput>
+
+<userinput><command>cc</command> <option>-o artsctest</option> <option>artsctest.c</option> <option>`artsc-config --cflags`</option> <option>`artsc-config --libs`</option></userinput>
+</screen>
+
+</sect2>
+
+<sect2 id="c-api-reference">
+<title>Library Reference</title>
+
+<para>
+[TODO: generate the documentation for artsc.h using kdoc]
+</para>
+
+</sect2>
+
+</sect1>
+</chapter>
diff --git a/doc/artsbuilder/arts-structure.png b/doc/artsbuilder/arts-structure.png
new file mode 100644
index 00000000..8f6f3131
--- /dev/null
+++ b/doc/artsbuilder/arts-structure.png
Binary files differ
diff --git a/doc/artsbuilder/artsbuilder.docbook b/doc/artsbuilder/artsbuilder.docbook
new file mode 100644
index 00000000..b5f4f68c
--- /dev/null
+++ b/doc/artsbuilder/artsbuilder.docbook
@@ -0,0 +1,864 @@
+<chapter id="artsbuilder">
+<title>&arts-builder;</title>
+
+<sect1 id="overview">
+<title>Overview</title>
+
+<para>
+First of all, when trying to run &arts-builder;, you should also be
+running the sound server (&artsd;). Usually, when you use &kde; 2.1,
+this should already be the case. If not, you can configure the automatic
+sound server startup in &kcontrol; under
+<menuchoice><guilabel>Sound</guilabel><guilabel>Sound
+Server</guilabel></menuchoice>.
+</para>
+
+<para>
+When you are running &arts;, it always runs small modules. &arts-builder;
+is a tool to create new structures of small connected modules. You
+simply click the modules inside the grid. To do so, choose them from the
+<guimenu>Modules</guimenu> menu, and then click somewhere in the
+green-gray plane.
+</para>
+
+<para>
+Modules usually have ports (where usually audio signals are flowing in
+or out). To connect two ports, click on the first, which causes it to
+turn orange, and then click on the second. You can only connect an input
+port (on the upper side of a module) with an output port (on the lower
+side of a module). If you want to assign a fixed value to a port (or
+disconnect it), do so by double clicking on the port.
+</para>
+
+</sect1>
+
+<sect1 id="artsbuilder-tutorial">
+<title>Tutorial</title>
+
+<sect2 id="step-1">
+<title>Step 1</title>
+
+<para>
+Start &arts-builder;.
+</para>
+
+<para>
+You need a Synth&lowbar;AMAN&lowbar;PLAY-module to hear the output you
+are creating. So create a Synth&lowbar;AMAN&lowbar;PLAY-module by
+selecting <menuchoice><guimenu>Modules</guimenu>
+<guisubmenu>Synthesis</guisubmenu> <guisubmenu>SoundIO</guisubmenu>
+<guisubmenu>Synth&lowbar;AMAN&lowbar;PLAY</guisubmenu></menuchoice> and
+clicking on the empty module space. Put it below the fifth line or so,
+because we'll add some stuff above.
+</para>
+
+<para>
+The module will have a parameter <parameter>title</parameter> (leftmost
+port), and <parameter>autoRestoreID</parameter> (besides the leftmost
+port) for finding it. To fill these out, doubleclick on these ports,
+select constant value and type <userinput>tutorial</userinput> in the
+edit box. Click <guibutton>OK</guibutton> to apply.
+</para>
+
+<para>
+Select <menuchoice><guimenu>File</guimenu><guimenuitem>Execute
+structure</guimenuitem> </menuchoice>. You will hear absolutely
+nothing. The play module needs some input yet... ;) If you have listened
+to the silence for a while, click <guibutton>OK</guibutton> and go to
+Step 2
+</para>
+</sect2>
+
+<sect2 id="step-2">
+<title>Step 2</title>
+
+<para>Create a Synth&lowbar;WAVE&lowbar;SIN module (from <menuchoice>
+<guimenu>Modules</guimenu> <guimenuitem>Synthesis</guimenuitem>
+<guimenuitem>Waveforms</guimenuitem></menuchoice>)
+and put it above the Synth&lowbar;AMAN&lowbar;PLAY module. (Leave one line
+space in between).
+</para>
+
+<para>
+As you see, it produces some output, but requires a
+<guilabel>pos</guilabel> as input. First lets put the output to the
+speakers. Click on the <guilabel>out</guilabel> port of the
+Synth&lowbar;WAVE&lowbar;SIN and then on the <guilabel>left</guilabel>
+port of Synth&lowbar;AMAN&lowbar;PLAY. Voila, you have connected two
+modules.
+</para>
+
+<para>
+All oscillators in &arts; don't require a frequency as input, but a
+position in the wave. The position should be between 0 and 1, which maps
+for a standard Synth&lowbar;WAVE&lowbar;SIN object to the range
+0..2*pi. To generate oscillating values from a frequency, a
+Synth&lowbar;FREQUENCY module is used.
+</para>
+
+<para>
+Create a Synth&lowbar;FREQUENCY module (from <menuchoice>
+<guimenu>Modules</guimenu> <guimenu>Synthesis</guimenu>
+<guimenu>Oscillation &amp; Modulation</guimenu> </menuchoice>) and
+connect its <quote>pos</quote> output to the <quote>pos</quote> input
+of your Synth&lowbar;WAVE&lowbar;SIN. Specify the frequency port of the
+FREQUENCY generator as constant value 440.
+</para>
+
+
+<para>
+Select <menuchoice><guimenu>File</guimenu><guimenuitem>Execute
+structure</guimenuitem></menuchoice>. You will hear a sine wave at 440
+Hz on one of your speakers. If you have listened to it for a while,
+click <guibutton>OK</guibutton> and go to Step 3.
+</para>
+
+</sect2>
+
+<sect2 id="step-3">
+<title>Step 3</title>
+
+<para>
+Ok, it would be nicer if you would hear the sin wave on both speakers.
+Connect the right port of Synth&lowbar;PLAY to the outvalue of the
+Synth&lowbar;WAVE&lowbar;SIN as well.
+</para>
+
+<para>Create a Synth&lowbar;SEQUENCE object (from
+<menuchoice><guimenu>Modules</guimenu>
+<guisubmenu>Synthesis</guisubmenu><guisubmenu>Midi &amp;
+Sequencing</guisubmenu></menuchoice>). It should be at the top of the
+screen. If you need more room you can move the other modules by
+selecting them (to select multiple modules use &Shift;), and dragging
+them around.
+</para>
+
+<para>
+Now connect the frequency output of Synth&lowbar;SEQUENCE to the
+frequency input of the Synth&lowbar;FREQUENCY module. Then specify the
+sequence speed as constant value 0.13 (the speed is the leftmost port).
+</para>
+
+<para>
+Now go to the rightmost port (sequence) of Synth&lowbar;SEQUENCE and
+type in as constant value <userinput>A-3;C-4;E-4;C-4;</userinput> this
+specifies a sequence. More to that in the Module Reference.
+</para>
+
+<note>
+<para>Synth&lowbar;SEQUENCE really <emphasis>needs</emphasis> a sequence
+and the speed. Without that you'll perhaps get core dumps.
+</para>
+</note>
+
+<para>
+Select <menuchoice><guimenu>File</guimenu><guimenuitem>Execute
+Structure</guimenuitem></menuchoice>. You will hear a nice sequence
+playing. If you have enjoyed the feeling, click
+<guibutton>OK</guibutton> and go to Step 4.
+</para>
+</sect2>
+
+<sect2 id="step-4">
+<title>Step 4</title>
+
+<para>Create a Synth&lowbar;PSCALE module (from
+<menuchoice><guimenu>Modules</guimenu>
+<guisubmenu>Synthesis</guisubmenu> <guisubmenu>Envelopes</guisubmenu>
+</menuchoice>). Disconnect the outvalue of the SIN wave by doubleclicking it
+and choosing <guilabel>not connected</guilabel>. Connect
+</para>
+
+<orderedlist><listitem>
+<para>The SIN outvalue to the PSCALE invalue</para>
+</listitem>
+<listitem>
+<para>The PSCALE outvalue to the AMAN_PLAY left</para>
+</listitem>
+<listitem>
+<para>The PSCALE outvalue to the AMAN_PLAY right</para>
+</listitem>
+<listitem>
+<para>The SEQUENCE pos to the PSCALE pos</para>
+</listitem>
+</orderedlist>
+
+<para>
+Finally, set the PSCALE top to some value, for instance 0.1.
+</para>
+
+<para>
+How that works now: The Synth&lowbar;SEQUENCE gives additional
+information about the position of the note it is playing right now,
+while 0 means just started and 1 means finished. The Synth&lowbar;PSCALE
+module will scale the audio stream that is directed through it from a
+volume 0 (silent) to 1 (original loudness) back to 0 (silent). According
+to the position. The position where the peak should occur can be given
+as pos. 0.1 means that after 10&percnt; of the note has been played, the
+volume has reached its maximum, and starts decaying afterwards.
+</para>
+
+
+<para>Select <menuchoice><guimenu>File</guimenu><guimenuitem>Execute
+Structure</guimenuitem></menuchoice>. You will hear a nice sequence
+playing. If you have enjoyed the feeling, click
+<guibutton>OK</guibutton> and go to Step 5.
+</para>
+
+</sect2>
+
+<sect2 id="step-5-starting-to-beam-data-around">
+<title>Step 5: Starting to beam data around ;)</title>
+
+<para>Start another &arts-builder;</para>
+
+<para>
+Put a Synth&lowbar;AMAN&lowbar;PLAY into it, configure it to a sane
+name. Put a Synth&lowbar;BUS&lowbar;DOWNLINK into it and:</para>
+
+<orderedlist>
+<listitem>
+<para>
+Set Synth&lowbar;BUS&lowbar;DOWNLINK bus to audio (that is just a name,
+call it fred if you like)
+</para>
+</listitem>
+<listitem>
+<para>
+Connect Synth&lowbar;BUS&lowbar;DOWNLINK left to
+Synth&lowbar;AMAN&lowbar;PLAY left
+</para>
+</listitem>
+<listitem>
+<para>
+Connect Synth&lowbar;BUS&lowbar;DOWNLINK right to
+Synth&lowbar;AMAN&lowbar;PLAY right
+</para>
+</listitem>
+</orderedlist>
+
+<para>
+Start executing the structure. As expected, you hear nothing, ... not
+yet.
+</para>
+
+<para>
+Go back to the structure with the Synth&lowbar;WAVE&lowbar;SIN stuff and
+replace the Synth&lowbar;AMAN&lowbar;PLAY module by an
+Synth&lowbar;BUS&lowbar;UPLINK, and configure the name to audio (or fred
+if you like). Deleting modules works with selecting them and choosing
+<menuchoice><guimenu>Edit</guimenu>
+<guimenuitem>delete</guimenuitem></menuchoice> from the menu (or
+pressing the <keycap>Del</keycap> key).
+</para>
+
+<para>
+Hit <menuchoice><guimenu>File</guimenu> <guilabel>Execute
+structure</guilabel></menuchoice>. You will hear the sequence with
+scaled notes, transported over the bus.
+</para>
+
+<para>
+If you want to find out why something like this can actually be useful,
+click <guibutton>OK</guibutton> (in the &arts-builder; that is executing
+the Synth&lowbar;SEQUENCE stuff, you can leave the other one running)
+and go to Step 6.
+</para>
+</sect2>
+
+<sect2 id="step-6-beaming-for-advanced-users">
+<title>Step 6: Beaming for advanced users</title>
+
+<para>
+Choose <menuchoice><guimenu>File</guimenu><guimenuitem>Rename</guimenuitem>
+</menuchoice> structure from the menu of the artsbuilder which
+contains the Synth&lowbar;SEQUENCE stuff, and call it tutorial. Hit
+<guibutton>OK</guibutton>.
+</para>
+
+<para>
+Choose <menuchoice><guimenu>File</guimenu> <guimenuitem>Save</guimenuitem>
+</menuchoice>
+</para>
+
+<para>
+Start yet another &arts-builder; and choose
+<menuchoice><guimenu>File</guimenu><guimenuitem>Load</guimenuitem>
+</menuchoice>, and load the tutorial again.
+</para>
+
+<para>
+Now you can select
+<menuchoice><guimenu>File</guimenu><guimenuitem>Execute
+structure</guimenuitem> </menuchoice>in both &arts-builder;s having that
+structure. You'll now hear two times the same thing. Depending on the
+time when you start it, it will sound more or less nice.
+</para>
+
+<para>
+Another thing that is good to do at this point in time is: start
+&noatun;, and play some <literal role="extension">mp3</literal>. Start
+&artscontrol;. Go to
+<menuchoice><guimenu>View</guimenu><guimenuitem>View audio
+manager</guimenuitem></menuchoice>. What you will see is &noatun; and
+your <quote>tutorial</quote> playback structure playing something. The
+nice thing you can do is this: doubleclick on &noatun;. You'll now get a
+list of available busses. And see? You can assign &noatun; to send its
+output via the audio bus your tutorial playback structure provides.
+</para>
+</sect2>
+
+<sect2 id="step-7-midi-synthesis">
+<title>Step 7: Midi synthesis</title>
+
+<para>
+Finally, now you should be able to turn your sin wave into an real
+instrument. This only makes sense if you have something handy that could
+send &MIDI; events to &arts;. I'll describe here how you can use some
+external keyboard, but a midibus aware sequence like &brahms; will work
+as well.
+</para>
+
+<para>
+First of all, clean up on your desktop until you only have one
+&arts-builder; with the sine wave structure running (not executing).
+Then, three times go to <menuchoice><guimenu>Ports</guimenu>
+<guisubmenu>Create IN audio signal</guisubmenu></menuchoice>, and three
+times to <menuchoice><guimenu>Ports</guimenu> <guisubmenu>Create OUT
+audio signal</guisubmenu></menuchoice>. Place the ports somewhere.
+</para>
+
+<para>
+Finally, go to <menuchoice><guimenu>Ports</guimenu> <guilabel>Change
+positions and names</guilabel></menuchoice> and call the ports
+frequency, velocity, pressed, left, right, done.
+</para>
+
+<para>
+Finally, you can delete the Synth&lowbar;SEQUENCE module, and rather
+connect the frequency input port of the structure to the
+Synth&lowbar;FREQUENCY frequency port. Hm. But what to do about
+pos?</para> <para>We don't have this, because with no algorithm in the
+world, you can predict when the user will release the note he just
+pressed on the midi keyboard. So we rather have a pressed parameter
+instead that just indicates whether the user still holds down the
+key. (pressed = 1: key still held down, pressed = 0: key
+released)
+</para>
+
+<para>
+That means the Synth&lowbar;PSCALE object also must be replaced
+now. Plug in a Synth&lowbar;ENVELOPE&lowbar;ADSR instead (from
+<menuchoice><guimenu>Modules</guimenu>
+<guisubmenu>Synthesis</guisubmenu> <guisubmenu>Envelopes</guisubmenu>
+</menuchoice>). Connect:
+</para>
+
+<orderedlist>
+<listitem>
+<para>The pressed structure input to the ADSR active</para>
+</listitem>
+<listitem>
+<para>The SIN outvalue to the ADSR invalue</para>
+</listitem>
+<listitem>
+<para>The ADSR outvalue to the left structure output</para>
+</listitem><listitem>
+<para>The ADSR outvalue to the right structure output</para>
+</listitem>
+</orderedlist>
+
+<para>
+Set the parameters attack to 0.1, decay to 0.2, sustain to 0.7, release
+to 0.1.
+</para>
+
+<para>
+Another thing we need to think of is that the instrument structure
+somehow should know when it is finished playing and then be cleaned up,
+because otherwise it would be never stopped even if the note has been
+released. Fortunately, the ADSR envelope knows when there will be nothing
+to hear anymore, since it anyway scales the signal to zero at some point
+after the note has been released.
+</para>
+
+<para>
+This is indicated by setting the done output to 1. So connect this to
+the done output of the structure. The structure will be removed as soon
+as done goes up to 1.
+</para>
+
+<para>
+Rename your structure to instrument_tutorial (from <menuchoice><guimenu>
+File</guimenu> <guimenuitem>Rename
+structure</guimenuitem></menuchoice>). Then, save it using save as (the
+default name offered should be instrument_tutorial
+now).</para><para>Start artscontrol, and go to
+<menuchoice><guimenu>View</guimenu><guimenuitem>Midi
+Manager</guimenuitem></menuchoice>, and choose
+<menuchoice><guimenu>Add</guimenu><guimenuitem>aRts Synthesis Midi
+Output</guimenuitem></menuchoice>. Finally, you should be able to
+select your instrument (tutorial) here.
+</para>
+
+<para>
+Open a terminal and type
+<userinput><command>midisend</command></userinput>. You'll see that
+<command>midisend</command> and the instrument are listed now in the
+&arts; &MIDI; manager. After selecting both and hitting
+<guibutton>connect</guibutton>, we're finally done. Take your keyboard
+and start playing (of course it should be connected to your computer).
+</para>
+</sect2>
+
+<sect2 id="suggestions">
+<title>Suggestions</title>
+
+<para>
+You now should be able to work with &arts;. Here are a few tips what you
+could try to improve with your structures now:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Try using other things than a SIN wave. When you plug in a TRI wave, you
+will most likely think the sound is not too nice. But try appending a
+SHELVE&lowbar;CUTOFF filter right after the TRI wave to cut the
+frequencies above a certain frequency (try something like 1000 Hz, or
+even better two times the input frequency or input frequency+200Hz or
+something like that).
+</para>
+</listitem>
+<listitem>
+<para>
+Try using more than one oscillator. Synth&lowbar;XFADE can be used to
+cross fade (mix) two signals, Synth&lowbar;ADD to add them.
+</para>
+</listitem>
+<listitem>
+<para>
+Try setting the frequencies of the oscillators to not exactly the same
+value, that gives nice oscillations.
+</para>
+</listitem>
+<listitem>
+<para>
+Experiment with more than one envelope.
+</para>
+</listitem>
+<listitem>
+<para>
+Try synthesizing instruments with different output left and right.
+</para>
+</listitem>
+<listitem>
+<para>
+Try postprocessing the signal after it comes out the bus downlink. You
+could for instance mix a delayed version of the signal to the original
+to get an echo effect.
+</para>
+</listitem>
+<listitem>
+<para>
+Try using the velocity setting (it's the strength with which the note has
+been pressed, you could also say volume). The special effect is always
+when this not only modifies the volume of the resulting signal, but as
+well the sound of the instrument (for instance the cutoff frequency).
+</para>
+</listitem>
+<listitem>
+<para>...</para>
+</listitem>
+</itemizedlist>
+
+<para>
+If you have created something great, please consider providing it for
+the &arts; web page. Or for inclusion into the next release.
+</para>
+</sect2>
+
+</sect1>
+
+<sect1 id="artsbuilder-examples">
+<title>Examples</title>
+
+<para>
+&arts-builder; comes with several examples, which can be opened through
+<menuchoice><guimenu>File</guimenu><guimenuitem>Open
+Example...</guimenuitem> </menuchoice>. Some of them are in the
+folder, some of them (which for some reason don't work with the
+current release) are left in the todo folder.
+</para>
+<para>
+The examples fall into several categories:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Standalone examples illustrating how to use each of the built-in
+arts modules (named <filename>example_*.arts</filename>). These
+typically send some output to a sound card.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Instruments built from lower level arts modules (named
+<filename>instrument_*.arts</filename>). These follow a standard
+convention for input and output ports so they can be used by the &MIDI;
+manager in &artscontrol;.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Templates for creating new modules (named
+<filename>template_*.arts</filename>).
+</para>
+</listitem>
+
+<listitem>
+<para>
+Effects which can be used as reusable building blocks (named
+<filename>effect_*.arts</filename>) [ all in todo ]
+</para>
+</listitem>
+
+<listitem>
+<para>
+Mixer elements used for creating mixers, including graphical
+controls (named <filename>mixer_element_*.arts</filename>). [ all in todo ]
+</para>
+</listitem>
+
+<listitem>
+<para>
+Miscellaneous modules that don't fit into any of the above categories.
+</para>
+</listitem>
+</itemizedlist>
+
+<variablelist>
+<title>Detailed Description Of Each Module:</title>
+<varlistentry>
+<term><filename>example_stereo_beep.arts</filename></term>
+<listitem>
+<para>
+Generates a 440Hz sine wave tone in the left channel and an 880Hz sine
+wave tone in the right channel, and sends it to the sound card
+output. This is referenced in the &arts; documentation.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_sine.arts</filename></term>
+<listitem>
+<para>
+Generates a 440 Hz sine wave.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_pulse.arts</filename></term>
+<listitem>
+<para>
+Generates a 440 Hz pulse wave with a 20% duty cycle.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_softsaw.arts</filename></term>
+<listitem>
+<para>
+Generates a 440 Hz sawtooth wave.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_square.arts</filename></term>
+<listitem>
+<para>
+Generates a 440 Hz square wave.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_tri.arts</filename></term>
+<listitem>
+<para>
+Generates a 440 Hz triangle wave.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_noise.arts</filename></term>
+<listitem>
+<para>
+Generates white noise.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_dtmf1.arts</filename></term>
+<listitem>
+<para>
+Generates a dual tone by producing 697 and 1209 Hz sine waves, scaling
+them by 0.5, and adding them together. This is the DTMF tone for the
+digit "1" on a telephone keypad.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_atan_saturate.arts</filename></term>
+<listitem>
+<para>
+Runs a triangle wave through the atan saturate filter.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_autopanner.arts</filename></term>
+<listitem>
+<para>
+Uses an autopanner to pan a 400 Hz sine wave between the left and right
+speakers at a 2 Hz rate.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_brickwall.arts</filename></term>
+<listitem>
+<para>
+Scales a sine wave by a factor of 5 and then runs it through a brickwall
+limiter.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_bus.arts</filename></term>
+<listitem>
+<para>
+Downlinks from a bus called <quote>Bus</quote> and uplinks to the bus
+<quote>out_soundcard</quote> with the left and right channels reversed.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_cdelay.arts</filename></term>
+<listitem>
+<para>
+Downlinks from a bus called <quote>Delay</quote>, uplinks the right
+channel through a 0.5 second cdelay, and the left channel unchanged. You
+can use &artscontrol; to connect the effect to a sound player and
+observe the results.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_delay.arts</filename></term>
+<listitem>
+<para>
+This is the same as <filename>example_cdelay.arts</filename> but uses
+the delay effect.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_capture_wav.arts</filename></term>
+<listitem>
+<para>
+This uses the Synth_CAPTURE_WAV to save a 400 Hz sine wave as a wav
+file. Run the module for a few seconds, and then examine the file
+created in <filename class="directory">/tmp</filename>. You can play the
+file with a player such as <application>kaiman</application>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_data.arts</filename></term>
+<listitem>
+<para>
+This uses the Data module to generate a constant stream of the value
+<quote>3</quote> and sends it to a Debug module to periodically display
+it. It also contains a Nil module, illustrating how it can be used to do
+nothing at all.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_adsr.arts</filename></term>
+<listitem>
+<para>
+Shows how to create a simple instrument sound using the Envelope Adsr
+module, repetitively triggered by a square wave.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_fm.arts</filename></term>
+<listitem>
+<para>
+This uses the FM Source module to generate a 440 Hz sine wave which is
+frequency modulated at a 5 Hz rate.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_freeverb.arts</filename></term>
+<listitem>
+<para>
+This connects the Freeverb effect from a bus downlink to a bus
+outlink. You can use artscontrol to connect the effect to a sound player
+and observe the results.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_flanger.arts</filename></term>
+<listitem>
+<para>
+This implements a simple flanger effect (it doesn't appear to work yet,
+though).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_moog.arts</filename></term>
+<listitem>
+<para>
+This structure combines the two channels from a bus into one, passes it
+through the Moog VCF filter, and sends it out the out_soundcard bus.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_pitch_shift.arts</filename></term>
+<listitem>
+<para>
+This structure passes the left channel of sound card data through the
+Pitch Shift effect. Adjust the speed parameter to vary the effect.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_rc.arts</filename></term>
+<listitem>
+<para>
+This structure passes a white noise generator through an RC filter and
+out to the sound card. By viewing the FFT Scope display in artscontrol
+you can see how this varies from an unfiltered noise waveform.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_sequence.arts</filename></term>
+<listitem>
+<para>
+This demonstrates the Sequence module by playing a sequence of notes.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_shelve_cutoff.arts</filename></term>
+<listitem>
+<para>
+This structure passes a white noise generator through a Shelve Cutoff
+filter and out to the sound card. By viewing the FFT Scope display in
+artscontrol you can see how this varies from an unfiltered noise
+waveform.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_equalizer.arts</filename></term>
+<listitem>
+<para>
+This demonstrates the Std_Equalizer module. It boosts the low and high
+frequencies by 6 dB.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_tremolo.arts</filename></term>
+<listitem>
+<para>
+This demonstrates the Tremolo effect. It modulates the left and right
+channels using a 10 Hz tremolo.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_xfade.arts</filename></term>
+<listitem>
+<para>
+This example mixes 440 and 880 Hz sine waves using a cross fader.
+Adjust the value of the cross fader's percentage input from -1 to 1 to
+control the mixing of the two signals.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_pscale.arts</filename></term>
+<listitem>
+<para>
+This illustrates the Pscale module (I'm not sure if this is a
+meaningful example).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><filename>example_play_wav.arts</filename></term>
+<listitem>
+<para>
+This illustrates the Play Wave module. You will need to
+enter the full path to a <literal role="extension">.wav</literal> file
+as the filename parameter.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>example_multi_add.arts</term>
+<listitem>
+<para>
+This shows the Multi Add module which accepts any number of inputs. It
+sums three Data modules which produce inputs of 1, 2, and 3, and
+displays the result 6.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+</sect1>
+</chapter>
diff --git a/doc/artsbuilder/detail.docbook b/doc/artsbuilder/detail.docbook
new file mode 100644
index 00000000..c7ed7319
--- /dev/null
+++ b/doc/artsbuilder/detail.docbook
@@ -0,0 +1,1765 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="arts-in-detail">
+<title>&arts; in Detail</title>
+
+<sect1 id="architecture">
+<title>Architecture</title>
+
+<mediaobject>
+<imageobject>
+<imagedata fileref="arts-structure.png" format="PNG"/>
+</imageobject>
+<textobject><phrase>The &arts; structure.</phrase></textobject>
+</mediaobject>
+</sect1>
+
+<sect1 id="modules-ports">
+<title>Modules &amp; Ports</title>
+
+<para>
+The idea of &arts; is, that synthesis can be done using small modules,
+which only do one thing, and then recombine them in complex
+structures. The small modules normally have inputs, where they can get
+some signals or parameters, and outputs, where they produce some
+signals.
+</para>
+
+<para>
+One module (Synth&lowbar;ADD) for instance just takes the two signals at
+its input and adds them together. The result is available as output
+signal. The places where modules provide their input/output signals are
+called ports.
+</para>
+
+</sect1>
+
+<sect1 id="structures">
+<title>Structures</title>
+
+<para>
+A structure is a combination of connected modules, some of which may
+have parameters coded directly to their input ports, others which may be
+connected, and others, which are not connected at all.
+</para>
+
+<para>
+What you can do with &arts-builder; is describing structures. You
+describe, which modules you want to be connected with which other
+modules. When you are done, you can save that structure description to a
+file, or tell &arts; to create such a structure you described (Execute).
+</para>
+
+<para>
+Then you'll probably hear some sound, if you did everything the right
+way.
+</para>
+</sect1>
+
+<!-- TODO
+
+<sect1 id="streams">
+<title>Streams</title>
+<para>
+</para>
+</sect1>
+
+-->
+
+<sect1 id="latency">
+<title>Latency</title>
+
+<sect2 id="what-islatency">
+<title>What Is Latency?</title>
+
+<para>
+Suppose you have an application called <quote>mousepling</quote> that
+should make a <quote>pling</quote> sound if you click on a button. The
+latency is the time between your finger clicking the mouse button and
+you hearing the pling. The latency in this setup composes itself out of
+certain latencies, that have different causes.
+</para>
+
+</sect2>
+
+<sect2 id="latenbcy-simple">
+<title>Latency in Simple Applications</title>
+
+<para>
+In this simple application, latency occurs at these places:
+</para>
+
+<itemizedlist>
+
+<listitem>
+<para>
+The time until the kernel has notified the X11 server that a mouse
+button was pressed.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time until the X11 server has notified your application that a mouse
+button was pressed.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time until the mousepling application has decided that this button
+is worth playing a pling.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time it takes the mousepling application to tell the soundserver
+that it should play a pling.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time it takes for the pling (which the soundserver starts mixing to
+the other output at once) to go through the buffered data, until it
+really reaches the position where the soundcard plays.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time it takes the pling sound from the speakers to reach your ear.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+The first three items are latencies external to &arts;. They are
+interesting, but beyond the scope of this document. Nevertheless be
+aware that they exist, so that even if you have optimized everything
+else to really low values, you may not necessarily get exactly the
+result you calculated.
+</para>
+
+<para>
+Telling the server to play something involves usually one single &MCOP;
+call. There are benchmarks which confirm that, on the same host with
+unix domain sockets, telling the server to play something can be done
+about 9000 times in one second with the current implementation. I expect
+that most of this is kernel overhead, switching from one application to
+another. Of course this value changes with the exact type of the
+parameters. If you transfer a whole image with one call, it will be
+slower than if you transfer only one long value. For the returncode the
+same is true. However for ordinary strings (such as the filename of the
+<literal role="extension">wav</literal> file to play) this shouldn't be
+a problem.
+</para>
+
+<para>
+That means, we can approximate this time with 1/9000 sec, that is below
+0.15 ms. We'll see that this is not relevant.
+</para>
+
+<para>
+Next is the time between the server starting playing and the soundcard
+getting something. The server needs to do buffering, so that when other
+applications are running, such as your X11 server or
+<quote>mousepling</quote> application no dropouts are heard. The way
+this is done under &Linux; is that there are a number of fragments of a
+certain size. The server will refill fragments, and the soundcard will
+play fragments.
+fragments.
+</para>
+
+<para>
+So suppose there are three fragments. The server refills the first, the
+soundcard starts playing it. The server refills the second. The server
+refills the third. The server is done, other applications can do
+something now.
+</para>
+
+<para>
+As the soundcard has played the first fragment, it starts playing the
+second and the server starts refilling the first. And so on.
+</para>
+
+<para>
+The maximum latency you get with all that is (number of fragments)*(size
+of each fragment)/(samplingrate * (size of each sample)). Suppose we
+assume 44kHz stereo, and 7 fragments of 1024 bytes each (the current aRts
+defaults), we get 40 ms.
+</para>
+
+<para>
+These values can be tuned according to your needs. However, the
+<acronym>CPU</acronym> usage increases with smaller latencies, as the
+sound server needs to refill the buffers more often, and in smaller
+parts. It is also mostly impossible to reach better values without
+giving the soundserver realtime priority, as otherwise you'll often get
+drop-outs.
+</para>
+
+<para>
+However, it is realistic to do something like 3 fragments with 256 bytes
+each, which would make this value 4.4 ms. With 4.4ms delay the idle
+<acronym>CPU</acronym> usage of &arts; would be about 7.5%. With 40ms delay, it would be
+about 3% (of a PII-350, and this value may depend on your soundcard,
+kernel version and others).
+</para>
+
+<para>
+Then there is the time it takes the pling sound to get from the speakers
+to your ear. Suppose your distance from the speakers is 2 meters. Sound
+travels at a speed of 330 meters per second. So we can approximate this
+time with 6 ms.
+</para>
+
+</sect2>
+
+<sect2 id="latency-streaming">
+<title>Latency in Streaming Applications</title>
+
+<para>
+Streaming applications are those that produce their sound themselves.
+Assume a game, which outputs a constant stream of samples, and should
+now be adapted to replay things via &arts;. To have an example: when I
+press a key, the figure which I am playing jumps, and a boing sound is
+played.
+</para>
+
+<para>
+First of all, you need to know how &arts; does streaming. It's very
+similar to the I/O with the soundcard. The game sends some packets with
+samples to the sound server. Let's say three packets. As soon as the
+sound server is done with the first packet, it sends a confirmation back
+to the game that this packet is done.
+</para>
+
+<para>
+The game creates another packet of sound and sends it to the server.
+Meanwhile the server starts consuming the second sound packet, and so
+on. The latency here looks similar like in the simple case:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+The time until the kernel has notified the X11 server that a key was
+pressed.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time until the X11 server has notified the game that a key was
+pressed.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time until the game has decided that this key is worth playing a
+boing.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time until the packet of sound in which the game has started putting
+the boing sound is reaching the sound server.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time it takes for the boing (which the soundserver starts mixing to
+the other output at once) to go through the buffered data, until it
+really reaches the position where the soundcard plays.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The time it takes the boing sound from the speakers to
+reach your ear.
+</para>
+</listitem>
+
+</itemizedlist>
+
+<para>
+The external latencies, as above, are beyond the scope of this document.
+</para>
+
+<para>
+Obviously, the streaming latency depends on the time it takes all
+packets that are used for streaming to be played once. So it is (number
+of packets)*(size of each packet)/(samplingrate * (size of each sample))
+</para>
+
+<para>
+As you see that is the same formula as applies for the
+fragments. However for games, it makes no sense to do such small delays
+as above. I'd say a realistic configuration for games would be 2048
+bytes per packet, use 3 packets. The resulting latency would be 35ms.
+</para>
+
+<para>
+This is based on the following: assume that the game renders 25 frames
+per second (for the display). It is probably safe to assume that you
+won't notice a difference of sound output of one frame. Thus 1/25 second
+delay for streaming is acceptable, which in turn means 40ms would be
+okay.
+</para>
+
+<para>
+Most people will also not run their games with realtime priority, and
+the danger of drop-outs in the sound is not to be neglected. Streaming
+with 3 packets of 256 bytes each is possible (I tried that) - but causes a lot
+of <acronym>CPU</acronym> usage for streaming.
+</para>
+
+<para>
+For server side latencies, you can calculate these exactly as above.
+</para>
+
+</sect2>
+
+<sect2 id="cpu-usage">
+<title>Some <acronym>CPU</acronym> usage considerations</title>
+
+<para>
+There are a lot of factors which influence <acronym>CPU</acronym> usage
+in a complex scenario, with some streaming applications and some others,
+some plugins on the server etc. To name a few:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Raw <acronym>CPU</acronym> usage by the calculations necessary.
+</para>
+</listitem>
+
+<listitem>
+<para>
+&arts; internal scheduling overhead - how &arts; decides when which
+module should calculate what.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Integer to float conversion overhead.
+</para>
+</listitem>
+
+<listitem>
+<para>
+&MCOP; protocol overhead.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Kernel: process/context switching.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Kernel: communication overhead
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+For raw <acronym>CPU</acronym> usage for calculations, if you play two
+streams, simultaneously you need to do additions. If you apply a filter,
+some calculations are involved. To have a simplified example, adding two
+streams involves maybe four <acronym>CPU</acronym> cycles per addition,
+on a 350Mhz processor, this is 44100*2*4/350000000 = 0.1%
+<acronym>CPU</acronym> usage.
+</para>
+
+<para>
+&arts; internal scheduling: &arts; needs to decide which plugin when
+calculates what. This takes time. Take a profiler if you are interested
+in that. Generally what can be said is: the less realtime you do
+(&ie; the larger blocks can be calculated at a time) the less
+scheduling overhead you have. Above calculating blocks of 128 samples at
+a time (thus using fragment sizes of 512 bytes) the scheduling overhead
+is probably not worth thinking about it.
+</para>
+
+<para>
+Integer to float conversion overhead: &arts; uses floats internally as
+data format. These are easy to handle and on recent processors not
+slower than integer operations. However, if there are clients which play
+data which is not float (like a game that should do its sound output via
+&arts;), it needs to be converted. The same applies if you want to
+replay the sounds on your soundcard. The soundcard wants integers, so
+you need to convert.
+</para>
+
+<para>
+Here are numbers for a Celeron, approx. ticks per sample, with -O2 +egcs
+2.91.66 (taken by Eugene Smith <email>hamster@null.ru</email>). This is
+of course highly processor dependant:
+</para>
+
+<programlisting>
+convert_mono_8_float: 14
+convert_stereo_i8_2float: 28
+convert_mono_16le_float: 40
+interpolate_mono_16le_float: 200
+convert_stereo_i16le_2float: 80
+convert_mono_float_16le: 80
+</programlisting>
+
+<para>
+So that means 1% <acronym>CPU</acronym> usage for conversion and 5% for
+interpolation on this 350 MHz processor.
+</para>
+
+<para>
+&MCOP; protocol overhead: &MCOP; does, as a rule of thumb, 9000
+invocations per second. Much of this is not &MCOP;s fault, but relates
+to the two kernel causes named below. However, this gives a base to do
+calculations what the cost of streaming is.
+</para>
+
+<para>
+Each data packet transferred through streaming can be considered one
+&MCOP; invocation. Of course large packets are slower than 9000
+packets/s, but it's about the idea.
+</para>
+
+<para>
+Suppose you use packet sizes of 1024 bytes. Thus, to transfer a stream
+with 44kHz stereo, you need to transfer 44100*4/1024 = 172 packets per
+second. Suppose you could with 100% cpu usage transfer 9000 packets,
+then you get (172*100)/9000 = 2% <acronym>CPU</acronym> usage due to
+streaming with 1024 byte packets.
+</para>
+
+<para>
+That are approximations. However, they show, that you would be much
+better off (if you can afford it for the latency), to use for instance
+packets of 4096 bytes. We can make a compact formula here, by
+calculating the packet size which causes 100% <acronym>CPU</acronym> usage as
+44100*4/9000 = 19.6 bytes, and thus getting the quick formula:
+</para>
+
+<para>
+streaming <acronym>CPU</acronym> usage in percent = 1960/(your packet size)
+</para>
+
+<para>
+which gives us 0.5% <acronym>CPU</acronym> usage when streaming with 4096 byte packets.
+</para>
+
+<para>
+Kernel process/context switching: this is part of the &MCOP; protocol
+overhead. Switching between two processes takes time. There is new
+memory mapping, the caches are invalid, whatever else (if there is a
+kernel expert reading this - let me know what exactly are the causes).
+This means: it takes time.
+</para>
+
+<para>
+I am not sure how many context switches &Linux; can do per second, but
+that number isn't infinite. Thus, of the &MCOP; protocol overhead I
+suppose quite a bit is due to context switching. In the beginning of
+&MCOP;, I did tests to use the same communication inside one process,
+and it was much faster (four times as fast or so).
+</para>
+
+<para>
+Kernel: communication overhead: This is part of the &MCOP; protocol
+overhead. Transferring data between processes is currently done via
+sockets. This is convenient, as the usual select() methods can be used
+to determine when a message has arrived. It can also be combined with
+other I/O sources as audio I/O, X11 server or whatever else easily.
+</para>
+
+<para>
+However, those read and write calls cost certainly processor cycles. For
+small invocations (such as transferring one midi event) this is probably
+not so bad, for large invocations (such as transferring one video frame
+with several megabytes) this is clearly a problem.
+</para>
+
+<para>
+Adding the usage of shared memory to &MCOP; where appropriate is
+probably the best solution. However it should be done transparent to the
+application programmer.
+</para>
+
+<para>
+Take a profiler or do other tests to find out how much exactly
+current audio streaming is impacted by not using sharedmem. However,
+it's not bad, as audio streaming (replaying mp3) can be done with 6%
+total <acronym>CPU</acronym> usage for &artsd; and
+<application>artscat</application> (and 5% for the mp3
+decoder). However, this includes all things from the necessary
+calculations up to the socket overhead, thus I'd say in this setup you
+could perhaps save 1% by using sharedmem.
+</para>
+
+</sect2>
+
+<sect2 id="hard-numbers">
+<title>Some Hard Numbers</title>
+
+<para>
+These are done with the current development snapshot. I also wanted to
+try out the real hard cases, so this is not what everyday applications
+should use.
+</para>
+
+<para>
+I wrote an application called streamsound which sends streaming data to
+&arts;. Here it is running with realtime priority (without problems),
+and one small serverside (volume-scaling and clipping) plugin:
+</para>
+
+<programlisting>
+ 4974 stefan 20 0 2360 2360 1784 S 0 17.7 1.8 0:21 artsd
+ 5016 stefan 20 0 2208 2208 1684 S 0 7.2 1.7 0:02 streamsound
+ 5002 stefan 20 0 2208 2208 1684 S 0 6.8 1.7 0:07 streamsound
+ 4997 stefan 20 0 2208 2208 1684 S 0 6.6 1.7 0:07 streamsound
+</programlisting>
+
+<para>
+Each of them is streaming with 3 fragments of 1024 bytes each (18 ms). There
+are three such clients running simultaneously. I know that that does
+look a bit too much, but as I said: take a profiler and find out what
+costs time, and if you like, improve it.
+</para>
+
+<para>
+However, I don't think using streaming like that is realistic or makes
+sense. To take it even more to the extreme, I tried what would be the
+lowest latency possible. Result: you can do streaming without
+interruptions with one client application, if you take 2 fragments of
+128 bytes between aRts and the soundcard, and between the client
+application and aRts. This means that you have a total maximum latency
+of 128*4/44100*4 = 3 ms, where 1.5 ms is generated due to soundcard I/O
+and 1.5 ms is generated through communication with &arts;. Both
+applications need to run realtimed.
+</para>
+
+<para>
+But: this costs an enormous amount of
+<acronym>CPU</acronym>. This example cost you about 45% of my
+P-II/350. It also starts to click if you start top, move windows on your
+X11 display or do disk I/O. All these are kernel issues. The problem is
+that scheduling two or more applications with realtime priority costs you
+an enormous amount of effort, too, even more if they communicate, notify
+each other &etc;.
+</para>
+
+<para>
+Finally, a more real life example. This is &arts; with artsd and one
+artscat (one streaming client) running 16 fragments of 4096 bytes each:
+</para>
+
+<programlisting>
+ 5548 stefan 12 0 2364 2364 1752 R 0 4.9 1.8 0:03 artsd
+ 5554 stefan 3 0 752 752 572 R 0 0.7 0.5 0:00 top
+ 5550 stefan 2 0 2280 2280 1696 S 0 0.5 1.7 0:00 artscat
+</programlisting>
+
+</sect2>
+</sect1>
+
+<!-- TODO
+
+<sect1 id="dynamic-instantiation">
+<title>Dynamic Instantiation</title>
+<para>
+</para>
+</sect1>
+
+-->
+
+<sect1 id="busses">
+<title>Busses</title>
+
+<para>
+Busses are dynamically built connections that transfer audio. Basically,
+there are some uplinks and some downlinks. All signals from the uplinks
+are added and sent to the downlinks.
+</para>
+
+<para>
+Busses as currently implemented operate in stereo, so you can only
+transfer stereo data over busses. If you want mono data, well, transfer
+it only over one channel and set the other to zero or whatever. What
+you need to do is to create one or more Synth&lowbar;BUS&lowbar;UPLINK
+objects and tell them a bus name, to which they should talk (&eg;
+<quote>audio</quote> or <quote>drums</quote>). Simply throw the data in
+there.
+</para>
+
+<para>
+Then, you'll need to create one or more Synth&lowbar;BUS&lowbar;DOWNLINK
+objects, and tell them the bus name (<quote>audio</quote> or
+<quote>drums</quote> ... if it matches, the data will get through), and
+the mixed data will come out again.
+</para>
+
+<para>
+The uplinks and downlinks can reside in different structures, you can
+even have different &arts-builder;s running and start an uplink in one
+and receive the data from the other with a downlink.
+</para>
+
+<para>
+What is nice about busses is, that they are fully dynamic. Clients can
+plug in and out on the fly. There should be no clicking or noise as this
+happens.
+</para>
+
+<para>
+Of course, you should not plug out a client playing a signal, since it
+will probably not be at a zero level when plugged out of the bus, and then it
+will click.
+</para>
+</sect1>
+
+<!-- TODO
+<sect1 id="network-ransparency">
+<title>Network Transparency</title>
+<para>
+</para>
+</sect1>
+
+<sect1 id="security">
+<title>Security</title>
+<para>
+</para>
+</sect1>
+
+
+<sect1 id="effects">
+<title>Effects and Effect Stacks</title>
+<para>
+</para>
+</sect1>
+
+-->
+<sect1 id="trader">
+<title>Trader</title>
+
+<para>
+&arts;/&MCOP; heavily relies on splitting up things into small
+components. This makes things very flexible, as you can extend the
+system easily by adding new components, which implement new effects,
+fileformats, oscillators, gui elements, ... As almost everything is a
+component, almost everything can be extended easily, without changing
+existing sources. New components can be simply loaded dynamically to
+enhance already existing applications.
+</para>
+
+<para>
+However, to make this work, two things are required:
+</para>
+
+<itemizedlist>
+
+<listitem>
+<para>
+Components must advertise themselves - they must describe what great
+things they offer, so that applications will be able to use them.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Applications must actively look for components that they could use,
+instead of using always the same thing for some task.
+</para>
+</listitem>
+
+</itemizedlist>
+
+<para>
+The combination of this: components which say <quote>here I am, I am
+cool, use me</quote>, and applications (or if you like, other
+components) which go out and look which component they could use to get
+a thing done, is called trading.
+</para>
+
+<para>
+In &arts;, components describe themselves by specifying values that they
+<quote>support</quote> for properties. A typical property for a
+file-loading component could be the extension of the files that it can
+process. Typical values could be <literal
+role="extension">wav</literal>, <literal role="extension">aiff</literal>
+or <literal role="extension">mp3</literal>.
+</para>
+
+<para>
+In fact, every component may choose to offer many different values for
+one property. So one single component could offer reading both <literal
+role="extension">wav</literal> and <literal
+role="extension">aiff</literal> files, by specifying that it supports
+these values for the property <quote>Extension</quote>.
+</para>
+
+<para>
+To do so, a component has to place a <literal
+role="extension">.mcopclass</literal> file at an appropriate place,
+containing the properties it supports, for our example, this could look
+like this (and would be installed in
+<filename><replaceable>componentdir</replaceable>/Arts/WavPlayObject.mcopclass</filename>):
+</para>
+
+<programlisting>
+Interface=Arts::WavPlayObject,Arts::PlayObject,Arts::SynthModule,Arts::Object
+Author="Stefan Westerfeld &lt;stefan@space.twc.de&gt;"
+URL="http://www.arts-project.org"
+Extension=wav,aiff
+MimeType=audio/x-wav,audio/x-aiff
+</programlisting>
+
+<para>
+It is important that the filename of the <literal
+role="extension">.mcopclass</literal> file also indicates what the
+interface of the component is called. The trader doesn't look at the contents
+at all, if the file (like here) is called
+<filename>Arts/WavPlayObject.mcopclass</filename>, the component
+interface is called <interfacename>Arts::WavPlayObject</interfacename>
+(modules map to folders).
+</para>
+
+<para>
+To look for components, there are two interfaces (which are defined in
+<filename>core.idl</filename>, so you have them in every application),
+called <interfacename>Arts::TraderQuery</interfacename> and
+<interfacename>Arts::TraderOffer</interfacename>. You go on a
+<quote>shopping tour</quote> for components like this:
+</para>
+
+<orderedlist>
+<listitem>
+<para>
+Create a query object:
+</para>
+<programlisting>
+ Arts::TraderQuery query;
+</programlisting>
+</listitem>
+
+<listitem>
+<para>
+Specify what you want. As you saw above, components describe themselves
+using properties, for which they offer certain values. So specifying
+what you want is done by selecting components that support a certain
+value for a property. This is done using the supports method of a
+TraderQuery:
+</para>
+
+<programlisting>
+ query.supports("Interface","Arts::PlayObject");
+ query.supports("Extension","wav");
+</programlisting>
+</listitem>
+
+<listitem>
+<para>
+Finally, perform the query using the query method. Then, you'll
+(hopefully) get some offers:
+</para>
+
+<programlisting>
+ vector&lt;Arts::TraderOffer&gt; *offers = query.query();
+</programlisting>
+</listitem>
+
+<listitem>
+<para>
+Now you can examine what you found. Important is the interfaceName
+method of TraderOffer, which will tell you the name of the component,
+that matched the query. You can also find out further properties by
+getProperty. The following code will simply iterate through all
+components, print their interface names (which could be used for
+creation), and delete the results of the query again:
+</para>
+<programlisting>
+ vector&lt;Arts::TraderOffer&gt;::iterator i;
+ for(i = offers-&gt;begin(); i != offers-&gt;end(); i++)
+ cout &lt;&lt; i-&gt;interfaceName() &lt;&lt; endl;
+ delete offers;
+</programlisting>
+</listitem>
+</orderedlist>
+
+<para>
+For this kind of trading service to be useful, it is important to
+somehow agree on what kinds of properties components should usually
+define. It is essential that more or less all components in a certain
+area use the same set of properties to describe themselves (and the same
+set of values where applicable), so that applications (or other
+components) will be able to find them.
+</para>
+
+<para>
+Author (type string, optional): This can be used to ultimately let the
+world know that you wrote something. You can write anything you like in
+here, e-mail address is of course helpful.
+</para>
+
+<para>
+Buildable (type boolean, recommended): This indicates whether the
+component is usable with <acronym>RAD</acronym> tools (such as
+&arts-builder;) which use components by assigning properties and
+connecting ports. It is recommended to set this value to true for
+almost any signal processing component (such as filters, effects,
+oscillators, ...), and for all other things which can be used in
+<acronym>RAD</acronym> like fashion, but not for internal stuff like for
+instance <interfacename>Arts::InterfaceRepo</interfacename>.
+</para>
+
+<para>
+Extension (type string, used where relevant): Everything dealing with
+files should consider using this. You should put the lowercase version
+of the file extension without the <quote>.</quote> here, so something
+like <userinput>wav</userinput> should be fine.
+</para>
+
+<para>
+Interface (type string, required): This should include the full list of
+(useful) interfaces your component supports, probably including
+<interfacename>Arts::Object</interfacename> and if applicable
+<interfacename>Arts::SynthModule</interfacename>.
+</para>
+
+<para>
+Language (type string, recommended): If you want your component to be
+dynamically loaded, you need to specify the language here. Currently,
+the only allowed value is <userinput>C++</userinput>, which means the
+component was written using the normal C++ <acronym>API</acronym>. If
+you do so, you'll also need to set the <quote>Library</quote> property
+below.
+</para>
+
+<para>
+Library (type string, used where relevant): Components written in C++
+can be dynamically loaded. To do so, you have to compile them into a
+dynamically loadable libtool (<literal role="extension">.la</literal>)
+module. Here, you can specify the name of the <literal
+role="extension">.la</literal>-File that contains your component.
+Remember to use REGISTER_IMPLEMENTATION (as always).
+</para>
+
+<para>
+MimeType (type string, used where relevant): Everything dealing with
+files should consider using this. You should put the lowercase version
+of the standard mimetype here, for instance
+<userinput>audio/x-wav</userinput>.
+</para>
+
+<para>
+&URL; (type string, optional): If you like to let people know where they
+can find a new version of the component (or a homepage or anything), you
+can do it here. This should be a standard &HTTP; or &FTP; &URL;.
+</para>
+
+</sect1>
+
+<!-- TODO
+<sect1 id="midi-synthesis">
+<title><acronym>MIDI</acronym> Synthesis</title>
+<para>
+</para>
+</sect1>
+
+<sect1 id="instruments">
+<title>Instruments</title>
+<para>
+</para>
+</sect1>
+
+<sect1 id="session-management">
+<title>Session Management</title>
+<para>
+</para>
+</sect1>
+
+<sect1 id="full-duplex">
+<title>Full duplex Audio</title>
+<para>
+</para>
+</sect1>
+-->
+
+<sect1 id="namespaces">
+<title>Namespaces in &arts;</title>
+
+<sect2 id="namespaces-intro">
+<title>Introduction</title>
+
+<para>
+Each namespace declaration corresponds to a <quote>module</quote>
+declaration in the &MCOP; &IDL;.
+</para>
+
+<programlisting>
+// mcop idl
+
+module M {
+ interface A
+ {
+ }
+};
+
+interface B;
+</programlisting>
+
+<para>
+In this case, the generated C++ code for the &IDL; snippet would look
+like this:
+</para>
+
+<programlisting>
+// C++ header
+
+namespace M {
+ /* declaration of A_base/A_skel/A_stub and similar */
+ class A { // Smartwrapped reference class
+ /* [...] */
+ };
+}
+
+/* declaration of B_base/B_skel/B_stub and similar */
+class B {
+ /* [...] */
+};
+</programlisting>
+
+<para>
+So when referring to the classes from the above example in your C++ code,
+you would have to write <classname>M::A</classname>, but only
+B. However, you can of course use <quote>using M</quote> somewhere -
+like with any namespace in C++.
+</para>
+
+</sect2>
+
+<sect2 id="namespaces-how">
+<title>How &arts; uses namespaces</title>
+
+<para>
+There is one global namespace called <quote>Arts</quote>, which all
+programs and libraries that belong to &arts; itself use to put their
+declarations in. This means, that when writing C++ code that depends on
+&arts;, you normally have to prefix every class you use with
+<classname>Arts::</classname>, like this:
+</para>
+
+<programlisting>
+int main(int argc, char **argv)
+{
+ Arts::Dispatcher dispatcher;
+ Arts::SimpleSoundServer server(Arts::Reference("global:Arts_SimpleSoundServer"));
+
+ server.play("/var/foo/somefile.wav");
+</programlisting>
+
+<para>
+The other alternative is to write a using declaration once, like this:
+</para>
+
+<programlisting>
+using namespace Arts;
+
+int main(int argc, char **argv)
+{
+ Dispatcher dispatcher;
+ SimpleSoundServer server(Reference("global:Arts_SimpleSoundServer"));
+
+ server.play("/var/foo/somefile.wav");
+ [...]
+</programlisting>
+
+<para>
+In &IDL; files, you don't exactly have a choice. If you are writing code
+that belongs to &arts; itself, you'll have to put it into module &arts;.
+</para>
+
+<programlisting>
+// IDL File for aRts code:
+#include &lt;artsflow.idl&gt;
+module Arts { // put it into the Arts namespace
+ interface Synth_TWEAK : SynthModule
+ {
+ in audio stream invalue;
+ out audio stream outvalue;
+ attribute float tweakFactor;
+ };
+};
+</programlisting>
+
+<para>
+If you write code that doesn't belong to &arts; itself, you should not
+put it into the <quote>Arts</quote> namespace. However, you can make an
+own namespace if you like. In any case, you'll have to prefix classes
+you use from &arts;.
+</para>
+
+<programlisting>
+// IDL File for code which doesn't belong to aRts:
+#include &lt;artsflow.idl&gt;
+
+// either write without module declaration, then the generated classes will
+// not use a namespace:
+interface Synth_TWEAK2 : Arts::SynthModule
+{
+ in audio stream invalue;
+ out audio stream outvalue;
+ attribute float tweakFactor;
+};
+
+// however, you can also choose your own namespace, if you like, so if you
+// write an application "PowerRadio", you could for instance do it like this:
+module PowerRadio {
+ struct Station {
+ string name;
+ float frequency;
+ };
+
+ interface Tuner : Arts::SynthModule {
+ attribute Station station; // no need to prefix Station, same module
+ out audio stream left, right;
+ };
+};
+</programlisting>
+
+</sect2>
+
+<sect2 id="namespaces-implementation">
+<title>Internals: How the Implementation Works</title>
+
+<para>
+Often, in interfaces, casts, method signatures and similar, &MCOP; needs
+to refer to names of types or interfaces. These are represented as
+string in the common &MCOP; datastructures, while the namespace is
+always fully represented in the C++ style. This means the strings would
+contain <quote>M::A</quote> and <quote>B</quote>, following the example
+above.
+</para>
+
+<para>
+Note this even applies if inside the &IDL; text the namespace qualifiers
+were not given, since the context made clear which namespace the
+interface <interfacename>A</interfacename> was meant to be used in.
+</para>
+
+</sect2>
+</sect1>
+
+<sect1 id="threads">
+<title>Threads in &arts;</title>
+
+<sect2 id="threads-basics">
+<title>Basics</title>
+
+<para>
+Using threads isn't possible on all platforms. This is why &arts; was
+originally written without using threading at all. For almost all
+problems, for each threaded solution to the problem, there is a
+non-threaded solution that does the same.
+</para>
+
+<para>
+For instance, instead of putting audio output in a separate thread, and
+make it blocking, &arts; uses non-blocking audio output, and figures out
+when to write the next chunk of data using
+<function>select()</function>.
+</para>
+
+<para>
+However, &arts; (in very recent versions) at least provides support for
+people who do want to implement their objects using threads. For
+instance, if you already have code for an <literal
+role="extension">mp3</literal> player, and the code expects the <literal
+role="extension">mp3</literal> decoder to run in a separate thread, it's
+usually the easiest thing to do to keep this design.
+</para>
+
+<para>
+The &arts;/&MCOP; implementation is built along sharing state between
+separate objects in obvious and non-obvious ways. A small list of shared
+state includes:
+</para>
+
+<itemizedlist>
+<listitem><para>
+The Dispatcher object which does &MCOP; communication.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The Reference counting (Smartwrappers).
+</para>
+</listitem>
+
+<listitem>
+<para>
+The IOManager which does timer and fd watches.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The ObjectManager which creates objects and dynamically loads plugins.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The FlowSystem which calls calculateBlock in the appropriate situations.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+All of the above objects don't expect to be used concurrently (&ie;
+called from separate threads at the same time). Generally there are two
+ways of solving this:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Require the caller of any functions on these objects to
+acquire a lock before using them.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Making these objects really threadsafe and/or create
+per-thread instances of them.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+&arts; follows the first approach: you will need a lock whenever you talk to
+any of these objects. The second approach is harder to do. A hack which
+tries to achieve this is available at
+<ulink url="http://space.twc.de/~stefan/kde/download/arts-mt.tar.gz">
+http://space.twc.de/~stefan/kde/download/arts-mt.tar.gz</ulink>, but for
+the current point in time, a minimalistic approach will probably work
+better, and cause less problems with existing applications.
+</para>
+
+</sect2>
+<sect2 id="threads-locking">
+<title>When/how to acquire the lock?</title>
+
+<para>
+You can get/release the lock with the two functions:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+<ulink
+url="http://space.twc.de/~stefan/kde/arts-mcop-doc/arts-reference/headers/Arts__Dispatcher.html#lock"><function>Arts::Dispatcher::lock()</function></ulink>
+</para>
+</listitem>
+<listitem>
+<para>
+<ulink
+url="http://space.twc.de/~stefan/kde/arts-mcop-doc/arts-reference/headers/Arts__Dispatcher.html#unlock"><function>Arts::Dispatcher::unlock()</function></ulink>
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+Generally, you don't need to acquire the lock (and you shouldn't try to
+do so), if it is already held. A list of conditions when this is the
+case is:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+You receive a callback from the IOManager (timer or fd).
+</para>
+</listitem>
+
+<listitem>
+<para>
+You get called due to some &MCOP; request.
+</para>
+</listitem>
+
+<listitem>
+<para>
+You are called from the NotificationManager.
+</para>
+</listitem>
+
+<listitem>
+<para>
+You are called from the FlowSystem (calculateBlock)
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+There are also some exceptions: functions which you can only call in
+the main thread, and for that reason you will never need a lock to call
+them:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Constructor/destructor of Dispatcher/IOManager.
+</para>
+</listitem>
+
+<listitem>
+<para>
+<methodname>Dispatcher::run()</methodname> /
+<methodname>IOManager::run()</methodname>
+</para>
+</listitem>
+
+<listitem>
+<para><methodname>IOManager::processOneEvent()</methodname></para>
+</listitem>
+</itemizedlist>
+
+<para>
+But that is it. For everything else that is somehow related to &arts;,
+you will need to get the lock, and release it again when
+done. Always. Here is a simple example:
+</para>
+
+<programlisting>
+class SuspendTimeThread : public Arts::Thread {
+public:
+ void run() {
+ /*
+ * you need this lock because:
+ * - constructing a reference needs a lock (as global: will go to
+ * the object manager, which might in turn need the GlobalComm
+ * object to look up where to connect to)
+ * - assigning a smartwrapper needs a lock
+ * - constructing an object from reference needs a lock (because it
+ * might need to connect a server)
+ */
+ Arts::Dispatcher::lock();
+ Arts::SoundServer server = Arts::Reference("global:Arts_SoundServer");
+ Arts::Dispatcher::unlock();
+
+ for(;;) { /*
+ * you need a lock here, because
+ * - dereferencing a smartwrapper needs a lock (because it might
+ * do lazy creation)
+ * - doing an MCOP invocation needs a lock
+ */
+ Arts::Dispatcher::lock();
+ long seconds = server.secondsUntilSuspend();
+ Arts::Dispatcher::unlock();
+
+      printf("seconds until suspend = %ld\n",seconds);
+ sleep(1);
+ }
+ }
+};
+</programlisting>
+
+
+</sect2>
+
+<sect2 id="threads-classes">
+<title>Threading related classes</title>
+
+<para>
+The following threading related classes are currently available:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+<ulink
+url="http://www.arts-project.org/doc/headers/Arts__Thread.html"><classname>
+Arts::Thread</classname></ulink> - which encapsulates a thread.
+</para>
+</listitem>
+
+<listitem>
+<para>
+<ulink url="http://www.arts-project.org/doc/headers/Arts__Mutex.html">
+<classname>Arts::Mutex</classname></ulink> - which encapsulates a mutex.
+</para>
+</listitem>
+
+<listitem>
+<para>
+<ulink
+url="http://www.arts-project.org/doc/headers/Arts__ThreadCondition.html">
+<classname>Arts::ThreadCondition</classname></ulink> - which provides
+support to wake up threads which are waiting for a certain condition to
+become true.
+</para>
+</listitem>
+
+<listitem>
+<para>
+<ulink
+url="http://www.arts-project.org/doc/headers/Arts__SystemThreads.html"><classname>Arts::SystemThreads</classname></ulink>
+- which encapsulates the operating system threading layer (which offers
+a few helpful functions to application programmers).
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+See the links for documentation.
+</para>
+
+</sect2>
+</sect1>
+
+<sect1 id="references-errors">
+<title>References and Error Handling</title>
+
+<para>
+&MCOP; references are one of the most central concepts in &MCOP;
+programming. This section will try to describe how exactly references
+are used, and will especially also try to cover cases of failure (server
+crashes).
+</para>
+
+<sect2 id="references-properties">
+<title>Basic properties of references</title>
+
+<itemizedlist>
+<listitem>
+<para>
+An &MCOP; reference is not an object, but a reference to an object: Even
+though the following declaration
+
+<programlisting>
+ Arts::Synth_PLAY p;
+</programlisting>
+
+looks like a definition of an object, it only declares a reference to an
+object. As C++ programmer, you might also think of it as Synth_PLAY *, a
+kind of pointer to a Synth_PLAY object. This especially means, that p
+can be the same thing as a NULL pointer.
+</para>
+</listitem>
+
+<listitem>
+<para>
+You can create a NULL reference by assigning it explicitly
+</para>
+<programlisting>
+ Arts::Synth_PLAY p = Arts::Synth_PLAY::null();
+</programlisting>
+</listitem>
+
+<listitem>
+<para>
+Invoking things on a NULL reference leads to a core dump
+</para>
+<programlisting>
+ Arts::Synth_PLAY p = Arts::Synth_PLAY::null();
+ string s = p.toString();
+</programlisting>
+<para>
+will lead to a core dump. Comparing this to a pointer, it is essentially
+the same as
+<programlisting>
+ QWindow* w = 0;
+ w-&gt;show();
+</programlisting>
+which every C++ programmer would know to avoid.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Uninitialized objects try to lazy-create themselves upon first use
+</para>
+
+<programlisting>
+ Arts::Synth_PLAY p;
+ string s = p.toString();
+</programlisting>
+<para>
+is something different than dereferencing a NULL pointer. You didn't tell
+the object at all what it is, and now you try to use it. The guess here
+is that you want to have a new local instance of an Arts::Synth_PLAY
+object. Of course you might have wanted something else (like creating the
+object somewhere else, or using an existing remote object). However, it
+is a convenient shortcut to creating objects. Lazy creation will not work
+once you assigned something else (like a null reference).
+</para>
+
+<para>
+The equivalent C++ terms would be
+<programlisting>
+ QWidget* w;
+ w-&gt;show();
+</programlisting>
+
+which obviously in C++ just plain segfaults. So this is different here.
+This lazy creation is tricky especially as not necessarily an implementation
+exists for your interface.
+</para>
+
+<para>
+For instance, consider an abstract thing like a
+Arts::PlayObject. There are certainly concrete PlayObjects like those for
+playing mp3s or wavs, but
+
+<programlisting>
+ Arts::PlayObject po;
+ po.play();
+</programlisting>
+
+will certainly fail. The problem is that although lazy creation kicks
+in, and tries to create a PlayObject, it fails, because there are only
+things like Arts::WavPlayObject and similar. Thus, use lazy creation
+only when you are sure that an implementation exists.
+</para>
+</listitem>
+
+<listitem>
+<para>
+References may point to the same object
+</para>
+
+<programlisting>
+ Arts::SimpleSoundServer s = Arts::Reference("global:Arts_SimpleSoundServer");
+ Arts::SimpleSoundServer s2 = s;
+</programlisting>
+
+<para>
+creates two references referring to the same object. It doesn't copy any
+value, and doesn't create two objects.
+</para>
+</listitem>
+
+<listitem>
+<para>
+All objects are reference counted. So once an object isn't referred to
+any longer by any references, it gets deleted. There is no way to
+explicitly delete an object, however, you can use something like this
+<programlisting>
+ Arts::Synth_PLAY p;
+ p.start();
+ [...]
+ p = Arts::Synth_PLAY::null();
+</programlisting>
+to make the Synth_PLAY object go away in the end. Especially, it should never
+be necessary to use new and delete in conjunction with references.
+</para>
+</listitem>
+</itemizedlist>
+
+</sect2>
+
+<sect2 id="references-failure">
+<title>The case of failure</title>
+
+<para>
+As references can point to remote objects, the servers containing these
+objects can crash. What happens then?
+</para>
+
+<itemizedlist>
+
+<listitem>
+<para>
+A crash doesn't change whether a reference is a null reference. This
+means that if <function>foo.isNull()</function> was
+<returnvalue>true</returnvalue> before a server crash then it is also
+<returnvalue>true</returnvalue> after a server crash (which is
+clear). It also means that if <function>foo.isNull()</function> was
+<returnvalue>false</returnvalue> before a server crash (foo referred to
+an object) then it is also <returnvalue>false</returnvalue> after the
+server crash.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Invoking methods on a valid reference stays safe.
+Suppose the server containing the object calc crashed. Still calling things
+like
+<programlisting>
+   int k = calc.subtract(i,j);
+</programlisting>
+are safe. Obviously subtract has to return something here, which it
+can't because the remote object no longer exists. In this case (k == 0)
+would be true. Generally, operations try to return something
+<quote>neutral</quote> as result, such as 0.0, a null reference for
+objects or empty strings, when the object no longer exists.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Checking <function>error()</function> reveals whether something worked.
+</para>
+
+<para>
+In the above case,
+<programlisting>
+    int k = calc.subtract(i,j);
+    if(calc.error()) {
+ printf("k is not i-j!\n");
+ }
+</programlisting>
+would print out <computeroutput>k is not i-j</computeroutput> whenever
+the remote invocation didn't work. Otherwise <varname>k</varname> is
+really the result of the subtract operation as performed by the remote
+object (no server crash). However, for methods doing things like
+deleting a file, you can't know for sure whether it really happened. Of
+course it happened if <function>.error()</function> is
+<returnvalue>false</returnvalue>. However, if
+<function>.error()</function> is <returnvalue>true</returnvalue>, there
+are two possibilities:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+The file got deleted, and the server crashed just after deleting it, but
+before transferring the result.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The server crashed before being able to delete the file.
+</para>
+</listitem>
+</itemizedlist>
+</listitem>
+
+<listitem>
+<para>
+Using nested invocations is dangerous in crash resistant programs
+</para>
+
+<para>
+Using something like
+<programlisting>
+ window.titlebar().setTitle("foo");
+</programlisting>
+is not a good idea. Suppose you know that window contains a valid Window
+reference. Suppose you know that <function>window.titlebar()</function>
+will return a Titlebar reference because the Window object is
+implemented properly. However, still the above statement isn't safe.
+</para>
+
+<para>
+What could happen is that the server containing the Window object has
+crashed. Then, regardless of how good the Window implementation is, you
+will get a null reference as result of the window.titlebar()
+operation. And then of course invoking setTitle on that null reference
+will lead to a crash as well.
+</para>
+
+<para>
+So a safe variant of this would be
+<programlisting>
+ Titlebar titlebar = window.titlebar();
+ if(!window.error())
+ titlebar.setTitle("foo");
+</programlisting>
+add the appropriate error handling if you like. If you don't trust the
+Window implementation, you might as well use
+<programlisting>
+ Titlebar titlebar = window.titlebar();
+ if(!titlebar.isNull())
+ titlebar.setTitle("foo");
+</programlisting>
+which are both safe.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+There are other conditions of failure, such as network disconnection
+(suppose you remove the cable between your server and client while your
+application runs). However their effect is the same like a server crash.
+</para>
+
+<para>
+Overall, it is of course a consideration of policy how strictly you try
+to trap communication errors throughout your application. You might
+follow the <quote>if the server crashes, we need to debug the server
+until it never crashes again</quote> method, which would mean you need
+not bother about all these problems.
+</para>
+
+</sect2>
+
+<sect2 id="references-internals">
+<title>Internals: Distributed Reference Counting</title>
+
+<para>
+An object, to exist, must be owned by someone. If it isn't, it will
+cease to exist (more or less) immediately. Internally, ownership is
+indicated by calling <function>_copy()</function>, which increments a
+reference count, and given back by calling
+<function>_release()</function>. As soon as the reference count drops to
+zero, a delete will be done.
+</para>
+
+<para>
+As a variation of the theme, remote usage is indicated by
+<function>_useRemote()</function>, and dissolved by
+<function>_releaseRemote()</function>. These functions maintain a list of
+which servers have invoked them (and thus own the object). This is used in case
+this server disconnects (&ie; crash, network failure), to remove the
+references that are still on the objects. This is done in
+<function>_disconnectRemote()</function>.
+</para>
+
+<para>
+Now there is one problem. Consider a return value. Usually, the return
+value object will not be owned by the calling function any longer. It
+will however also not be owned by the caller, until the message holding
+the object is received. So there is a time of
+<quote>ownershipless</quote> objects.
+</para>
+
+<para>
+Now, when sending an object, one can be reasonably sure that as soon as
+it is received, it will be owned by somebody again, unless, again, the
+receiver dies. However this means that special care needs to be taken
+of the object at least while sending, probably also while receiving, so
+that it doesn't die at once.
+</para>
+
+<para>
+The way &MCOP; does this is by <quote>tagging</quote> objects that are
+in process of being copied across the wire. Before such a copy is
+started, <function>_copyRemote</function> is called. This prevents the
+object from being freed for a while (5 seconds). Once the receiver calls
+<function>_useRemote()</function>, the tag is removed again. So all
+objects that are sent over the wire are tagged before transfer.
+</para>
+
+<para>
+If the receiver receives an object which is on its own server, of course
+it will not <function>_useRemote()</function> it. For this special case,
+<function>_cancelCopyRemote()</function> exists to remove the tag
+manually. Other than that, there is also timer based tag removal, if
+tagging was done, but the receiver didn't really get the object (due to
+crash, network failure). This is done by the
+<classname>ReferenceClean</classname> class.
+</para>
+
+</sect2>
+
+</sect1>
+
+<sect1 id="detail-gui-elements">
+<title>&GUI; Elements</title>
+
+<para>
+&GUI; elements are currently in the experimental state. However, this
+section will describe what is supposed to happen here, so if you are a
+developer, you will be able to understand how &arts; will deal with
+&GUI;s in the future. There is some code there already, too.
+</para>
+
+<para>
+&GUI; elements should be used to allow synthesis structures to interact
+with the user. In the simplest case, the user should be able to modify
+some parameters of a structure directly (such as a gain factor which is
+used before the final play module).
+</para>
+
+<para>
+In more complex settings, one could imagine the user modifying
+parameters of groups of structures and/or not yet running structures,
+such as modifying the <acronym>ADSR</acronym> envelope of the currently
+active &MIDI; instrument. Another thing would be setting the filename of
+some sample based instrument.
+</para>
+
+<para>
+On the other hand, the user could like to monitor what the synthesizer
+is doing. There could be oscilloscopes, spectrum analyzers, volume
+meters and <quote>experiments</quote> that figure out the frequency
+transfer curve of some given filter module.
+</para>
+
+<para>
+Finally, the &GUI; elements should be able to control the whole
+structure of what is running inside &arts; and how. The user should be
+able to assign instruments to midi channels, start new effect
+processors, configure his main mixer desk (which is built of &arts;
+structures itself) to have one channel more and use another strategy for
+its equalizers.
+</para>
+
+<para>
+You see - the <acronym>GUI</acronym> elements should bring all
+possibilities of the virtual studio &arts; should simulate to the
+user. Of course, they should also gracefully interact with midi inputs
+(such as sliders should move if they get &MIDI; inputs which also change
+just that parameter), and probably even generate events themselves, to
+allow the user interaction to be recorded via sequencer.
+</para>
+
+<para>
+Technically, the idea is to have an &IDL; base class for all widgets
+(<classname>Arts::Widget</classname>), and derive a number of commonly
+used widgets from there (like <classname>Arts::Poti</classname>,
+<classname>Arts::Panel</classname>, <classname>Arts::Window</classname>,
+...).
+</para>
+
+<para>
+Then, one can implement these widgets using a toolkit, for instance &Qt;
+or Gtk. Finally, effects should build their &GUI;s out of existing
+widgets. For instance, a freeverb effect could build its &GUI; out of
+five <classname>Arts::Poti</classname> thingies and an
+<classname>Arts::Window</classname>. So IF there is a &Qt;
+implementation for these base widgets, the effect will be able to
+display itself using &Qt;. If there is Gtk implementation, it will also
+work for Gtk (and more or less look/work the same).
+</para>
+
+<para>
+Finally, as we're using &IDL; here, &arts-builder; (or other tools) will
+be able to plug &GUI;s together visually, or autogenerate &GUI;s given
+hints for parameters, only based on the interfaces. It should be
+relatively straightforward to write a <quote>create &GUI; from
+description</quote> class, which takes a &GUI; description (containing
+the various parameters and widgets), and creates a living &GUI; object
+out of it.
+</para>
+
+<para>
+Based on &IDL; and the &arts;/&MCOP; component model, it should be easy
+to extend the possible objects which can be used for the &GUI; just as
+easy as it is to add a plugin implementing a new filter to &arts;.
+</para>
+
+</sect1>
+
+</chapter>
diff --git a/doc/artsbuilder/digitalaudio.docbook b/doc/artsbuilder/digitalaudio.docbook
new file mode 100644
index 00000000..99d24968
--- /dev/null
+++ b/doc/artsbuilder/digitalaudio.docbook
@@ -0,0 +1,14 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE appendix PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<appendix id="intro-digital-audio">
+<title>Introduction to Digital Audio</title>
+
+<para>digital sampling, filters, effects, &etc;</para>
+
+</appendix>
+
+
+
diff --git a/doc/artsbuilder/faq.docbook b/doc/artsbuilder/faq.docbook
new file mode 100644
index 00000000..c5b111ec
--- /dev/null
+++ b/doc/artsbuilder/faq.docbook
@@ -0,0 +1,1112 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+<chapter id="faq">
+<title>Questions and answers</title>
+
+<para>
+This section answers some frequently asked questions about &arts;.
+</para>
+
+<qandaset id="faq-general">
+<title>General Questions</title>
+
+<qandaentry>
+<question>
+<para>
+Does &kde; support my sound card for audio output?
+</para>
+</question>
+
+<answer>
+<para>
+&kde; uses &arts; to play sound, and &arts; uses the &Linux; kernel
+sound drivers, either <acronym>OSS</acronym> or <acronym>ALSA</acronym>
+(using <acronym>OSS</acronym> emulation). If your sound card is
+supported by either <acronym>ALSA</acronym> or <acronym>OSS</acronym>
+and properly configured (&ie; any other &Linux; application can output
+sound), it will work. There are however some problems with some specific
+hardware, please read the <link linkend="faq-hardware-specific">section
+for hardware specific problems</link> if you're having problems with artsd
+on your machine.
+</para>
+<para>
+Meanwhile, support for various other platforms has also been added. Here is
+a complete list of how the most recent version of &arts; can play sound. If
+you have an unsupported platform, please consider porting &arts; to your
+platform.
+</para>
+
+<informaltable>
+<tgroup cols="2">
+<thead>
+<row>
+<entry>&arts; audio I/O method</entry>
+<entry>Comment</entry>
+</row>
+</thead>
+
+<tbody>
+<row>
+<entry>paud</entry>
+<entry>Support for AIX Personal Audio Device</entry>
+</row>
+
+<row>
+<entry>alsa</entry>
+<entry>Linux ALSA-0.5 and ALSA-0.9 drivers</entry>
+</row>
+
+<row>
+<entry>libaudioio</entry>
+<entry>Support for generic LibAudioIO library which works on Solaris</entry>
+</row>
+
+<row>
+<entry>nas</entry>
+<entry>NAS sound server, useful for X Terminals with NAS support</entry>
+</row>
+
+<row>
+<entry>null</entry>
+<entry>Null audio device, discards sound silently</entry>
+</row>
+
+<row>
+<entry>oss</entry>
+<entry>OSS (Open Sound System) support (works on Linux, various BSDs and
+ other platforms with OSS drivers installed)</entry>
+</row>
+
+<row>
+<entry>toss</entry>
+<entry>Threaded OSS support, which works better in some cases where the
+ standard OSS support doesn't work well</entry>
+</row>
+
+<row>
+<entry>sgi</entry>
+<entry>SGI Direct Media support for IRIX</entry>
+</row>
+
+<row>
+<entry>sun</entry>
+<entry>Solaris support</entry>
+</row>
+
+</tbody>
+</tgroup>
+</informaltable>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+I can't play <literal role="extension">wav</literal> files with &artsd;!
+</para>
+</question>
+
+<answer>
+<para>
+Check that &artsd; is linked to <filename>libaudiofile</filename>
+(<userinput><command>ldd</command>
+<parameter>artsd</parameter></userinput>). If it isn't, download
+kdesupport, recompile everything, and it will work.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+I hear sound when logged in as <systemitem
+class="username">root</systemitem> but no other users have sound!
+</para>
+</question>
+
+<answer>
+<para>
+The permissions of the file <filename
+class="devicefile">/dev/dsp</filename> affect which users will have
+sound. To allow everyone to use it, do this:
+</para>
+
+<procedure>
+<step>
+<para>
+Log in as <systemitem class="username">root</systemitem>.
+</para>
+</step>
+
+<step>
+<para>
+Open a &konqueror; window.
+</para>
+</step>
+
+<step>
+<para>
+Go into the <filename class="directory">/dev</filename> folder.
+</para>
+</step>
+
+<step>
+<para>
+Click on the file <filename>dsp</filename> with the
+<mousebutton>right</mousebutton> mouse button, and choose properties.
+</para>
+</step>
+
+<step>
+<para>
+Click on the <guilabel>Permissions</guilabel> tab.
+</para>
+</step>
+
+<step>
+<para>
+Check the <guilabel>Read</guilabel> and <guilabel>Write</guilabel> check
+boxes in all sections.
+</para>
+</step>
+
+<step>
+<para>
+Click on <guibutton>OK</guibutton>.
+</para>
+</step>
+</procedure>
+
+<para>
+You can achieve the same effect in a terminal window using the command
+<userinput><command>chmod</command> <option>666</option>
+<parameter>/dev/dsp</parameter></userinput>.
+</para>
+
+<para>
+For restricting access to sound to specific users, you can use group
+permissions. On some &Linux; distributions, for instance Debian/Potato,
+<filename class="devicefile">/dev/dsp</filename> is already owned by a
+group called <systemitem class="groupname">audio</systemitem>, so all
+you need to do is add the users to this group.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+This helps for &artsd;, but what about &kmix;, &kmid;, &kscd;, &etc;?
+</para>
+</question>
+<answer>
+
+<para>
+There are various other devices which provide functionality accessed by
+multimedia applications. You can treat them in the same way, either by
+making them accessible for everyone, or using groups to control
+access. Here is a list, which may still be incomplete (also if there are
+various devices in a form like <filename
+class="devicefile">midi0</filename>, <filename
+class="devicefile">midi1</filename>, ..., then only the 0-version is
+listed here):
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+<filename class="devicefile">/dev/admmidi0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/adsp0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/amidi0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/amixer0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/audio</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/audio0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/cdrom</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/dmfm0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/dmmidi0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/dsp</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/dsp0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/midi0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/midi0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/midi00</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/midi00</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/mixer</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/mixer0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/mpu401data</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/mpu401stat</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/music</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/rmidi0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/rtc</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/sequencer</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/smpte0</filename>
+</para>
+</listitem>
+<listitem>
+<para>
+<filename class="devicefile">/dev/sndstat</filename>
+</para>
+</listitem>
+</itemizedlist>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>What can I do if artsd doesn't start or crashes while running?</para>
+</question>
+
+<answer>
+<para>
+First of all: try using the default settings in &kcontrol; (or if you
+are starting manually, don't give additional options besides maybe
+<userinput><option>-F</option><parameter>10</parameter>
+<option>-S</option><parameter>4096</parameter></userinput> for
+latency). Especially <emphasis>full duplex is likely to break</emphasis>
+with various drivers, so try disabling it.
+</para>
+
+<para>
+A good way to figure out why &artsd; doesn't start (or crashes while
+running) is to start it manually. Open a &konsole; window, and do:
+</para>
+
+<screen width="40"><prompt>%</prompt> <userinput><command>artsd</command> <option>-F</option><parameter>10</parameter> <option>-S</option><parameter>4096</parameter></userinput></screen>
+
+<para>
+You can also add the <option>-l0</option> option, which will print more
+information about what is happening, like this:
+</para>
+<screen width="40"><prompt>%</prompt> <userinput><command>artsd</command> <option>-l0</option> <option>-F</option><parameter>10</parameter> <option>-S</option><parameter>4096</parameter></userinput></screen>
+
+<para>
+Doing so, you will probably get some useful information why it didn't
+start. Or, if it crashes when doing this-and-that, you can do
+this-and-that, and see <quote>how</quote> it crashes. If you want to
+report a bug, producing a backtrace with <command>gdb</command> and/or
+an <command>strace</command> may help finding the problem.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>Can I relocate &artsd; (move compiled files to another
+folder)?</para>
+</question>
+
+<answer>
+<para>
+You can't relocate &arts; perfectly. The problem is that &artswrapper;
+has the location of &artsd; compiled in due to security reasons. You can
+however use the <filename>.mcoprc</filename> file
+(TraderPath/ExtensionPath entries) to at least make a relocated &artsd;
+find its components. See the <link linkend="the-mcoprc-file">chapter
+about the <filename>.mcoprc</filename> file</link> for details on how to
+do this.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>Can I compile &arts; with gcc-3.0?</para>
+</question>
+
+<answer>
+<para>
+Short answer: no, &arts; will not work if you compile it with gcc-3.0.
+</para>
+
+<para>
+Long answer: In the official release, there are two gcc-3.0 bugs which affect
+&arts;. The first, gcc-3.0 bug c++/2733 is relatively harmless (and has to do
+with problems with the asm statement). It breaks compilation of convert.cc. It
+has been fixed in the gcc-3.0 CVS, and will no longer be a problem with
+gcc-3.0.1 and higher. A workaround has also been added to the CVS version
+of KDE/aRts.
+</para>
+<para>
+The second gcc-3.0 bug, c++/3145 (which is generation of wrong code for some
+cases of multiple virtual inheritance) is critical. Applications like &artsd;
+will simply crash on startup when compiled with gcc-3.0. Even if some progress
+has been made in the gcc-3.0 branch at time of this writing, still &artsd;
+crashes quite often, unpredictably.
+</para>
+</answer>
+</qandaentry>
+<qandaentry>
+<question>
+<para>What applications run under &arts;?</para>
+</question>
+<answer>
+
+<para>
+Obviously, all of the applications included with &kde; are
+&arts;-aware. This includes:
+</para>
+
+<itemizedlist>
+<listitem><para>&noatun;</para></listitem>
+<listitem><para>&arts-builder;</para></listitem>
+<listitem><para>&aktion;</para></listitem>
+<listitem><para>&kmid;</para></listitem>
+<listitem><para>&kmidi;</para></listitem>
+<listitem><para>&kmix;</para></listitem>
+<listitem><para>&kscd;</para></listitem>
+<listitem><para>&kde; games such as &kpoker; and
+&ktuberling;</para></listitem>
+</itemizedlist>
+
+<para>
+Some &kde; applications that are not yet included in the &kde; release
+(&eg; in kdenonbeta) also support &arts;, including:
+</para>
+
+<itemizedlist>
+<listitem><para>&brahms;</para></listitem>
+<listitem><para><application>Kaboodle</application></para></listitem>
+<listitem><para><application>Kdao</application></para></listitem>
+</itemizedlist>
+
+<para>
+The following non-&kde; applications are known to work with &arts;:
+</para>
+
+<itemizedlist>
+<listitem><para><application>xmms</application> (with &arts;
+plug-in)</para></listitem>
+<listitem><para>Real Networks <application>RealPlayer</application> 8.0
+(works with &artsdsp;; native &arts; support is being
+considered)</para></listitem>
+</itemizedlist>
+
+<para>
+The following applications are known <emphasis>not</emphasis> to work
+with &arts;:
+</para>
+
+<itemizedlist>
+<listitem><para>none</para></listitem>
+</itemizedlist>
+
+<para>
+See also the answers to the questions in the section on
+<link linkend="faq-non-arts">non-&arts; applications</link>.
+</para>
+
+<para>
+This section is incomplete -- if you have more information on supported
+and unsupported applications, please send them to the author so they can
+be included here.
+</para>
+</answer>
+</qandaentry>
+
+</qandaset>
+
+<qandaset id="faq-non-arts">
+<title>Non-&arts; Applications</title>
+
+<qandaentry>
+<question>
+<para>
+As soon as &kde; is running, no other application can access my sound device!
+</para>
+</question>
+<answer>
+<para>
+Since the &arts; sound server used by &kde; is running, it is using the
+sound device. If the server is idle for 60 seconds, it will
+auto-suspend and release it automatically.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+You said it suspends after 60 seconds, it doesn't for me!
+</para>
+</question>
+<answer>
+<para>
+If you start artsd from the KDE control panel, the default is to suspend
+after 60 seconds. If you start artsd from the command line you need to
+use the -s option to specify the autosuspend time, otherwise it will
+default to disabling the autosuspend feature.
+</para>
+<para>
+Currently it doesn't suspend when using full duplex. Turn full duplex
+off from the &kcontrol; and it will suspend. Disabling full duplex is
+generally a good idea anyway if you only use &arts; for playing audio
+and not recording.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+How can I run old, non-&arts; applications?
+</para>
+</question>
+
+<answer>
+<para>
+Run them using the &artsdsp;. For instance, if you normally would run:
+</para>
+
+<screen><prompt>&percnt;</prompt> <userinput><command>mpg123</command> <option>foo.mp3</option></userinput></screen>
+
+<para>instead use:</para>
+
+<screen><prompt>&percnt;</prompt> <userinput><command>artsdsp</command> <option>mpg123 foo.mp3</option></userinput></screen>
+
+<para>
+This will redirect the sound output to &arts;. This method doesn't
+require changes to the applications. It is something of an ugly hack
+however, and does not yet fully support all features of the sound card
+device, so some applications may not work.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+I can't run &artsdsp; with any application, it always crashes!
+</para>
+</question>
+<answer>
+<para>
+You need a recent version of the glibc library; &artsdsp; will not work
+reliably on some older &Linux; distributions. For instance, on Debian
+2.1 (which is glibc 2.0 based) it doesn't work, while on Debian 2.2
+(which is glibc 2.1.3 based), it does.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+Are there theoretical limitations with some applications that will
+prevent them from ever working with &artsdsp;?
+</para>
+</question>
+<answer>
+<para>
+No. Using &artsdsp; can result in slightly more latency and
+<acronym>CPU</acronym> usage than using the &arts;
+<acronym>API</acronym>s directly. Other than that, any application that
+doesn't work should be considered a bug in &artsdsp;. The technique used
+by &artsdsp; should, if implemented properly, allow
+<emphasis>every</emphasis> application to work with it (including large
+applications like <application>Quake</application> 3).
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+What can I do if an application doesn't work with &artsdsp;?
+</para>
+</question>
+<answer>
+<para>
+You can wait for &artsd; to suspend or use the command
+<userinput><command>artsshell</command>
+<option>suspend</option></userinput> to ask the server to suspend
+itself. You will only be able to suspend the server if no &arts;
+applications are currently using it, and no &arts; applications will be
+able to run when the server is suspended.
+</para>
+
+<para>
+If the server is busy, a crude but effective way to get rid of it is:
+</para>
+
+
+<screen><prompt>&percnt;</prompt> <userinput><command>killall</command> <option>artsd</option> ; <command>killall</command> <option>artswrapper</option></userinput>
+<lineannotation>Now start your own application.</lineannotation>
+<prompt>&percnt;</prompt> <userinput><command>kcminit</command> <option>arts</option></userinput>
+</screen>
+
+<para>
+Any currently running &arts; applications may crash, however, once you
+kill the server.
+</para>
+</answer>
+</qandaentry>
+<qandaentry>
+<question>
+<para>
+What about applications written for &kde; 1.x?
+</para>
+</question>
+<answer>
+<para>
+If you are running &kde; 1.x applications, which output sound via the
+&kde; 1 audio server, you will need to run
+<application>kaudioserver</application> to make it work. You can start
+<application>kaudioserver</application> in the same way as other
+non-&arts;-applications:
+</para>
+
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>artsdsp</command> <option>kaudioserver</option></userinput>
+</screen>
+
+<para>
+You will need to have installed kaudioserver (from the same source where
+you got your &kde; 1.x applications from) - it belongs to &kde; 1.x, not
+&kde; 2.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+What about applications using the enlightened sound daemon,
+<acronym>ESD</acronym>?
+</para>
+</question>
+<answer>
+<para>
+The issue is similar to the one with
+<application>kaudioserver</application>. Such applications will need a
+running esd server. You can start <command>esd</command> via &artsdsp;,
+and every <acronym>ESD</acronym> aware application should work fine,
+like this:
+</para>
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>artsdsp</command> <option>esd</option></userinput>
+</screen>
+<para>
+Newer versions of aRts (>= 1.2.0) can also use the enlightened sound
+daemon instead of directly accessing the soundcard. On the command line, you
+can use the -a option, such as
+</para>
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>artsd</command> <option>-a esd</option></userinput>
+</screen>
+<para>
+to get EsounD support, whereas in KDE, you can use kcontrol to configure artsd
+to use esd via Sound -&gt; Sound Server -&gt; Sound I/O.
+</para>
+</answer>
+</qandaentry>
+
+</qandaset>
+
+<qandaset id="faq-latency">
+<title>Latency</title>
+
+<qandaentry>
+<question>
+<para>
+I sometimes hear short pauses when listening to music, is this a bug?
+</para>
+</question>
+<answer>
+<para>
+This is most likely not a bug, but caused by the fact that the &Linux;
+kernel is not very good at real-time scheduling. There are situations
+where &arts; will not be able to keep up with playback. You can,
+however, enable real-time rights (via &kcontrol;), and use a large
+latency setting (like <guilabel>250ms</guilabel> or <guilabel>don't
+care</guilabel>), which should improve the situation.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+What's the effect of the response time setting?
+</para>
+</question>
+<answer>
+<para>
+The help text for this setting in the &kcontrol; can be misleading. A
+lower value means that &arts; will take less time to respond to external
+events (&ie; the time that it takes between closing a window and
+hearing a sound played by &artsd;). It will also use more
+<acronym>CPU</acronym> resources, and be more likely to cause
+dropouts.</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+Is there anything else I can do to prevent pauses?
+</para>
+</question>
+<answer>
+<para>
+For users of <acronym>IDE</acronym> drives, you can use the
+<command>hdparm</command> command to put your <acronym>IDE</acronym>
+drive in <acronym>DMA</acronym> mode. A word of warning: this does not
+work on all hardware, and can result in having to do a hard reset or in
+rare cases, data loss. Read the documentation for the
+<command>hdparm</command> command for more details. I have successfully
+used the following command:
+</para>
+
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>hdparm</command> <option>-c1</option> <option>-d1</option> <option>-k1</option> <option>-K1</option> <parameter>/dev/hda</parameter></userinput>
+</screen>
+
+<para>
+You need to run this after every boot, so you might want to place it in
+a system startup script (how to do this is distribution specific; on Debian
+&Linux; it is usually put in <filename>/etc/rc.boot</filename>).
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+Realtime priority doesn't seem to have any effect for me?
+</para>
+</question>
+<answer>
+<para>
+Verify that artswrapper is really installed suid <systemitem class="username">root</systemitem>, like it is supposed to
+be. A lot of distributions (SuSE7.x for instance) don't do this. You can verify
+this using: ls -l $(which artswrapper). Good:
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>ls</command> <option>-l</option> <parameter>$(which artswrapper)</parameter></userinput>
+-rwsr-xr-x 1 root root 4556 Sep 24 18:05 /opt/kde2/bin/artswrapper
+</screen>
+Bad:
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>ls</command> <option>-l</option> <parameter>$(which artswrapper)</parameter></userinput>
+-rwxr-xr-x 1 root root 4556 Sep 24 18:05 /opt/kde2/bin/artswrapper
+</screen>
+If the s bit is not set, you can set it using:
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>chown</command> <option>root</option> <parameter>$(which artswrapper)</parameter></userinput>
+<prompt>&percnt;</prompt> <userinput><command>chmod</command> <option>4755</option> <parameter>$(which artswrapper)</parameter></userinput>
+</screen>
+</para>
+
+<para>If you make &artswrapper; SUID <systemitem
+class="username">root</systemitem>, it will likely improve the quality
+of your audio playback by reducing gaps in the music. However, it
+also increases the risk that a bug in the code or a malicious user can
+crash or otherwise harm your machine. In addition, on multi-user
+machines, prioritizing high-quality audio may result in deteriorated
+performance for the users who are trying to make
+<quote>productive</quote> use of the machine.</para>
+
+</answer>
+</qandaentry>
+
+
+<qandaentry>
+<question>
+<para>
+Why is &artsd; taking so much <acronym>CPU</acronym> time?
+</para>
+</question>
+<answer>
+<para>
+Check your response time settings. However, the current version is not
+yet really optimized. This will improve, and until then no real
+prediction can be made how fast &artsd; can or can't be.
+</para>
+</answer>
+</qandaentry>
+</qandaset>
+
+<qandaset id="faq-network">
+<title>Network Transparency</title>
+
+<qandaentry>
+<question>
+<para>
+What do I need for network transparency?
+</para>
+</question>
+<answer>
+<para>
+Enable it in the &kcontrol; <guilabel>Sound Server</guilabel> settings
+(<guilabel>enable X11 server for security information</guilabel> and
+<guilabel>network transparency</guilabel>). Then copy your
+<filename>.mcoprc</filename> to all machines you plan to use network
+transparency from. Log in again. Make sure that the hosts that interact
+know each other by name (&ie; they have resolvable names or are in
+<filename>/etc/hosts</filename>).
+</para>
+
+<para>
+This should be all you need to do. However, if it still doesn't work
+here are some additional details. The &arts; sound server process,
+&artsd;, should only run on one host, the one with the sound card where
+the sound should be played. It can be started automatically on login by
+&kde; (if you configure that in &kcontrol;), or manually using something
+like:
+</para>
+
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>artsd</command> <option>-n</option> <option>-F</option> <parameter>5</parameter> <option>-S</option> <parameter>8192</parameter></userinput>
+</screen>
+
+<para>
+The <option>-n</option> parameter is for network transparency, while the
+others configure latency.
+</para>
+
+<para>
+Your <filename>.mcoprc</filename> file should have this entry:
+</para>
+
+<screen>
+<userinput>GlobalComm=Arts::X11GlobalComm</userinput>
+</screen>
+
+<para>
+on all machines involved, in order for network transparency to work.
+This is what is enabled by the <guilabel>X11 server for security
+information</guilabel> control panel setting.
+</para>
+
+<para>
+Finally, in any &kde; version in the 2.0.x series, there is a bug which
+applies if you don't have a domain name set. Clients of &artsd; try to
+find where to connect to via the <systemitem
+class="systemname"><replaceable>hostname</replaceable>.<replaceable>domainname</replaceable></systemitem>
+combination. If your domain name is empty, it will try to connect to
+<systemitem
+class="systemname"><replaceable>hostname</replaceable></systemitem>. (note
+the extra dot). Adding an entry like this to
+<filename>/etc/hosts</filename> (&ie; <userinput>orion.</userinput> if
+your hostname is <systemitem class="systemname">orion</systemitem>)
+works around the problem.
+</para>
+</answer>
+</qandaentry>
+
+
+<qandaentry>
+<question>
+<para>
+How do I debug network transparency if it doesn't work?
+</para>
+</question>
+<answer>
+<para>
+Assuming you have the &kde; source code, go to <filename
+class="directory">kdelibs/arts/examples</filename>, and run
+<userinput><command>make</command> <option>check</option></userinput> to
+compile some programs, including
+<application>referenceinfo</application>. Then run
+</para>
+
+<screen>
+<prompt>&percnt;</prompt> <userinput><command>./referenceinfo</command> <option>global:Arts&lowbar;SimpleSoundServer</option></userinput>
+</screen>
+
+<para>
+The output will indicate the host name and port being used by
+&arts;. For example, <computeroutput>tcp:orion:1698</computeroutput>
+would mean that any client trying to use network transparency should
+know how to reach host <systemitem
+class="systemname">orion</systemitem>.
+</para>
+</answer>
+</qandaentry>
+
+</qandaset>
+
+<qandaset id="faq-hardware-specific">
+<title>Hardware specific questions</title>
+
+<qandaentry>
+<question>
+<para>
+What hardware does artsd not work well with?
+</para>
+</question>
+<answer>
+<para>
+It seems that there are a few linux drivers which don't work well with aRts in
+some kernel versions. Please read this list before reporting a bug. If you
+find that some information in this list is incomplete, please don't hesitate
+to let us know.
+
+<informaltable>
+<tgroup cols="4">
+<thead>
+<row>
+<entry>Linux Driver / Soundcard</entry>
+<entry>Fails under</entry>
+<entry>Works under</entry>
+<entry>Remarks</entry>
+</row>
+</thead>
+
+<tbody>
+<row>
+<entry>i810 driver (Intel 810 + AC97 Audio)</entry>
+<entry>2.4.9</entry>
+<entry>2.4.18, 2.2.20, commercial oss driver, alsa-0.5.12a with OSS emulation</entry>
+<entry>driver causes cpu overload (see below)</entry>
+</row>
+
+<row>
+<entry>maestro 3/4 chipset</entry>
+<entry>2.4.9</entry>
+<entry>?</entry>
+<entry>driver sometimes causes cpu overload (see below)</entry>
+</row>
+
+<row>
+<entry>aureal8820, aureal8830 drivers from sourceforge</entry>
+<entry>2.4.17</entry>
+<entry>?</entry>
+<entry>driver triggers assertion / causes cpu overload (see below)</entry>
+</row>
+
+<row>
+<entry>OSS Commercial 3.9.4g with Aureal Vortex</entry>
+<entry>?</entry>
+<entry>?</entry>
+<entry>system lockup</entry>
+</row>
+
+<row>
+<entry>ymfpci</entry>
+<entry>2.4.0, 2.4.12</entry>
+<entry>2.4.17</entry>
+<entry>driver triggers assertion (see below)</entry>
+</row>
+
+
+
+</tbody>
+</tgroup>
+</informaltable>
+</para>
+</answer>
+</qandaentry>
+
+
+
+<qandaentry>
+<question>
+<para>
+Why are there hardware specific problems and how do I see them?
+</para>
+</question>
+<answer>
+<para>
+The usual problem is that the driver doesn't supply aRts with enough or accurate
+enough information on when to write sound data. Most OSS drivers do supply
+correct information, but not all.
+</para>
+<para>
+You might notice that some other applications (like xmms) may not need this
+data, and thus work correctly even with your hardware. However, &arts; needs
+this data, so artsd might fail. This is still a bug in the driver, and not
+in &arts;.
+</para>
+<para>
+There are two kinds of behavior that artsd exposes on being run on an incorrect
+driver. Either it continuously tries to feed new data, but never really
+succeeds, which eventually leads to consuming all CPU power and reporting
+<emphasis>cpu overload</emphasis> and exiting. The other problem is that artsd
+might get supplied with wrong information how much to write. Artsd will then
+<emphasis>stop with an assertion</emphasis> like:
+<screen>
+artsd: audiosubsys.cc:458: void Arts::AudioSubSystem::handleIO(int):
+Assertion `len == can_write' failed.
+Aborted
+</screen>
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+What is wrong in the driver if I get the cpu overload problem?
+</para>
+</question>
+<answer>
+<para>
+Usually, artsd uses select() to find out when to write new data. Then, it
+uses an ioctl(...GETOSPACE...) to find out how much data to write. Finally,
+it writes this data.
+</para>
+<para>
+A problem occurs if artsd is woken up constantly, or when there are only
+minimal amounts of data to write. The OSS documentation specifies that select() only
+wakes up a process if there is at least one fragment to write. However, if
+artsd is woken up if there isn't data to write, or very little, for instance
+one sample, then it will keep writing little pieces of audio data, which can
+be very costly, and eventually overload the cpu.
+</para>
+<para>
+To fix this, the driver should wake up artsd only if there is a full fragment
+to write.
+</para>
+</answer>
+</qandaentry>
+
+<qandaentry>
+<question>
+<para>
+What is wrong in the driver if I get the assertion?
+</para>
+</question>
+<answer>
+<para>
+Usually, artsd uses select() to find out when to write new data. Then, it
+uses an ioctl(...GETOSPACE...) to find out how much data to write. Finally,
+it writes this data.
+</para>
+<para>
+If artsd can't write as much data as indicated by the ioctl, it will fail in
+the assertion. To fix this, the driver should supply the correct amount of
+free space.
+</para>
+</answer>
+</qandaentry>
+</qandaset>
+
+<qandaset id="faq-other">
+<title>Other Issues</title>
+
+<qandaentry>
+<question>
+<para>
+I can't use &arts-builder;. It crashes when executing a module!
+</para>
+</question>
+<answer>
+<para>
+The most likely cause is that you are using old structures or modules
+which aren't supported with the &kde; 2 version. Unfortunately the
+documentation which is on the web refers to &arts;-0.3.4.1 which is
+quite outdated. The most often reported crash is: that performing an
+execute structure in &arts-builder; results in the error message
+<errorname>[artsd] Synth_PLAY: audio subsystem is already
+used.</errorname>
+</para>
+
+<para>
+You should use a Synth_AMAN_PLAY instead of a Synth_PLAY module and the
+problem will go away. Also see the &arts-builder; help file (hit
+<keycap>F1</keycap> in &arts-builder;).
+</para>
+
+<para>
+Recent versions of &arts-builder; (&kde; 2.1 beta 1 and later) come with
+a set of examples which you can use.
+</para>
+</answer>
+</qandaentry>
+
+</qandaset>
+
+</chapter>
diff --git a/doc/artsbuilder/future.docbook b/doc/artsbuilder/future.docbook
new file mode 100644
index 00000000..529cc103
--- /dev/null
+++ b/doc/artsbuilder/future.docbook
@@ -0,0 +1,414 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.1.2-Based Variant
+V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="future-work">
+<title>Future Work</title>
+
+<para>
+This section describes some of the &arts; work that is in progress.
+Development progresses quickly, so this information may be out of date.
+You should check the TODO list file and the <link
+linkend="mailing-lists">mailing list</link> archives to see what new
+functionality is planned. Feel free to get involved in new design and
+implementation.
+</para>
+
+<para>
+This is a draft document which tries to give you an overview of how new
+technologies will be integrated in &arts;. Namely, it does cover the
+following:
+</para>
+
+<itemizedlist>
+<listitem><para>How interfaces work.</para></listitem>
+<listitem><para>Codecs - decoding of mp3 or wav streams in a form that
+they can be used as data.</para></listitem>
+<listitem><para>Video.</para></listitem>
+<listitem><para>Threading.</para></listitem>
+<listitem><para>Synchronization.</para></listitem>
+<listitem><para>Dynamic expansion/masquerading.</para></listitem>
+<listitem><para>Dynamic composition.</para></listitem>
+<listitem><para>&GUI;</para></listitem>
+<listitem><para>&MIDI;</para></listitem>
+</itemizedlist>
+
+<para>
+This is work in progress. However, it should be the base if you want to
+see new technology in &arts;. It should give you a general idea how
+these problems will be addressed. However, feel free to correct anything
+you see here.
+</para>
+
+<para>
+Things that will be using &arts; technology (so please, coordinate your
+efforts):
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+<application>KPhone</application> (voice over <acronym>IP</acronym>)
+</para>
+</listitem>
+
+<listitem>
+<para>
+&noatun; (video / audio player)
+</para>
+</listitem>
+
+<listitem>
+<para>
+&artscontrol; (sound server control program, for scopes)
+</para>
+</listitem>
+
+<listitem>
+<para>
+<application>Brahms</application> (music sequencer)
+</para>
+</listitem>
+
+<listitem>
+<para><application>Kaiman</application> (&kde;2 media player - kmedia2 compliant)
+</para>
+</listitem>
+
+<listitem>
+<para>
+<application>mpglib</application>/<application>kmpg</application>
+(<acronym>mpg</acronym> audio and video playing technology)
+</para>
+</listitem>
+
+<listitem>
+<para>
+<application>SDL</application> (direct media layer for games not
+yet started but maybe nice)
+</para>
+</listitem>
+
+<listitem>
+<para>
+<application>electric ears</application> (author contacted me - status
+unknown)
+</para>
+</listitem>
+</itemizedlist>
+
+<sect1 id="interfaces-how">
+<title>How Interfaces Work</title>
+
+<!-- I think this is now obsolete and documented elsewhere ? -->
+
+<para>
+&MCOP; interfaces are the base of the &arts; concept. They are the
+network transparent equivalent to C++ classes. Whenever possible you
+should orient your design towards interfaces. Interfaces consist of four
+parts:
+</para>
+
+<itemizedlist>
+<listitem><para>Synchronous streams</para></listitem>
+<listitem><para>Asynchronous streams</para></listitem>
+<listitem><para>Methods</para></listitem>
+<listitem><para>Attributes</para></listitem>
+</itemizedlist>
+
+<para>
+These can be mixed in any way you like. New technologies should be
+defined in terms of interfaces. Read the sections about asynchronous
+streams and synchronous streams, as well as the KMedia2 interfaces,
+which are a good example of how such things work.
+</para>
+
+<para>
+Interfaces are specified in <literal role="extension">.idl</literal>
+code and run through the <command>mcopidl</command> compiler. You
+derive the
+<classname><replaceable>Interfacename</replaceable>_impl</classname>
+class to implement them, and use
+<function>REGISTER_IMPLEMENTATION(Interfacename_impl)</function> to
+insert your object implementations into the &MCOP; object system.
+</para>
+
+</sect1>
+
+<sect1 id="codecs">
+<title>Codecs - Data Decoding</title>
+
+<para>
+The kmedia2 interfaces allow you to ignore that wav files, mp3s and
+whatever consist of data streams. Instead, you only implement methods to
+play them.
+</para>
+
+<para>
+Thus, you can write a wave loading routine in a way that you can play
+wave files (as PlayObject), but nobody else can use your code.
+</para>
+
+<para>
+Asynchronous streams would be the alternative. You define an interface
+which allows you to pass data blocks in, and get data blocks out. This
+looks like this in &MCOP;:
+</para>
+
+<programlisting>
+interface Codec {
+ in async byte stream indata;
+ out async byte stream outdata;
+};
+</programlisting>
+
+
+<para>
+Of course codecs could also provide attributes to emit additional data,
+such as format information.
+</para>
+
+<programlisting>
+interface ByteAudioCodec {
+ in async byte stream indata;
+ out async byte stream outdata;
+ readonly attribute samplingRate, bits, channels;
+};
+</programlisting>
+
+<para>
+This <interfacename>ByteAudioCodec</interfacename> for instance could be
+connected to a <interfacename>ByteStreamToAudio</interfacename> object,
+to make real float audio.
+</para>
+
+<para>
+Of course, other Codec types could involve directly emitting video data,
+such as
+</para>
+
+<programlisting>
+interface VideoCodec {
+ in async byte stream indata;
+ out video stream outdata; /* note: video streams do not exist yet */
+};
+</programlisting>
+
+<para>
+Most likely, a codec concept should be employed rather than the
+<quote>you know how to play and I don't</quote> way for instance
+<interfacename>WavPlayObject</interfacename> currently uses. However,
+somebody needs to sit down and do some experiments before an
+<acronym>API</acronym> can be finalized.
+</para>
+
+</sect1>
+
+<sect1 id="video">
+<title>Video</title>
+
+<para>
+My idea is to provide video as asynchronous streams of some native
+&MCOP; data type which contains images. This data type has yet to be
+created. Doing so, plugins which deal with video images could be connected
+the same way audio plugins can be connected.
+</para>
+
+<para>
+There are a few things that are important not to leave out, namely:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+There are <acronym>RGB</acronym> and <acronym>YUV</acronym> colorspaces.
+</para>
+</listitem>
+<listitem>
+<para>
+The format should be somehow tagged to the stream.
+</para>
+</listitem>
+<listitem>
+<para>
+Synchronization is important.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+My idea is to leave it possible to reimplement the
+<classname>VideoFrame</classname> class so that it can store stuff in a
+shared memory segment. Doing so, even video streaming between different
+processes would be possible without too much pain.
+</para>
+
+<para>
+However, the standard situation for video is that things are in the same
+process, from the decoding to the rendering.
+</para>
+
+<para>
+I have done a prototypic video streaming implementation, which you can
+download <ulink
+url="http://space.twc.de/~stefan/kde/download/video-quickdraw.tar.gz">here
+</ulink>. This would need to be integrated into &MCOP; after some
+experiments.
+</para>
+
+<para>
+A rendering component should be provided that supports XMITSHM (with
+<acronym>RGB</acronym> and <acronym>YUV</acronym>), Martin Vogt told me
+he is working on such a thing.
+</para>
+
+</sect1>
+
+<sect1 id="threading">
+<title>Threading</title>
+
+<para>
+Currently, &MCOP; is all single threaded. Maybe for video we will no
+longer be able to get around threading. Ok. There are a few things that
+should be treated carefully:
+</para>
+
+
+<itemizedlist>
+<listitem><para>
+SmartWrappers - they are not threadsafe due to non-safe reference
+counting and similar.
+</para>
+</listitem>
+<listitem>
+<para>
+Dispatcher / I/O - also not threadsafe.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+However, what I could imagine is to make selected modules threadsafe,
+for both, synchronous and asynchronous streaming. That way - with a
+thread aware flow system, you could schedule the signal flow over two or
+more processors. This would also help audio a lot on multiprocessor
+things.
+</para>
+
+<para>
+How it would work:
+</para>
+
+
+<itemizedlist>
+<listitem>
+<para>The Flow System decides which modules should calculate what - that
+is:
+</para>
+ <itemizedlist>
+ <listitem><para>video frames (with process_indata method)</para></listitem>
+ <listitem><para>synchronous audio streams
+ (calculateBlock)</para></listitem>
+ <listitem><para>other asynchronous streams, mainly byte
+ streams</para></listitem>
+ </itemizedlist>
+</listitem>
+<listitem>
+<para>
+Modules can calculate these things in own threads. For audio, it makes
+sense to reuse threads (&eg; render on four threads for four processors,
+no matter if 100 modules are running). For video and byte decompression,
+it may be more comfortable to have a blocking implementation in its own
+thread, which is synchronized against the rest of &MCOP; by the flow
+system.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Modules may not use &MCOP; functionality (such as remote invocations)
+during threaded operation.
+</para>
+</listitem>
+</itemizedlist>
+
+</sect1>
+
+<sect1 id="synchronization">
+<title>Synchronization</title>
+
+<para>
+Video and &MIDI; (and audio) may require synchronization. Basically, that
+is timestamping. The idea I have is to attach timestamps to asynchronous
+streams, by adding one timestamp to each packet. If you send two video
+frames, simply make it two packets (they are large anyway), so that you
+can have two different time stamps.
+</para>
+
+<para>
+Audio should implicitly have time stamps, as it is synchronous.
+</para>
+
+</sect1>
+
+<sect1 id="dynamic-composition">
+<title>Dynamic Composition</title>
+
+<para>
+It should be possible to say: An effect FX is composed out of these
+simpler modules. FX should look like a normal &MCOP; module (see
+masquerading), but in fact consist of other modules.
+</para>
+
+<para>
+This is required for &arts-builder;.
+</para>
+
+</sect1>
+
+<sect1 id="gui">
+<title>&GUI;</title>
+
+<para>
+All &GUI; components will be &MCOP; modules. They should have attributes
+like size, label, color, ... . A <acronym>RAD</acronym> builder
+(&arts-builder;) should be able to compose them visually.
+</para>
+
+<para>
+The &GUI; should be saveable by saving the attributes.
+</para>
+
+</sect1>
+
+<sect1 id="midi-stuff">
+<title>&MIDI;</title>
+
+<para>
+The &MIDI; stuff will be implemented as asynchronous streams. There are
+two options, one is using normal &MCOP; structures to define the types
+and the other is to introduce yet more custom types.
+</para>
+
+<para>
+I think normal structures may be enough, that is something like:
+</para>
+
+<programlisting>
+struct MidiEvent {
+ byte b1,b2,b3;
+ sequence&lt;byte&gt; sysex;
+}
+</programlisting>
+
+<para>
+Asynchronous streams should support custom stream types.
+</para>
+
+</sect1>
+
+</chapter>
+
+
diff --git a/doc/artsbuilder/glossary.docbook b/doc/artsbuilder/glossary.docbook
new file mode 100644
index 00000000..12fd2dac
--- /dev/null
+++ b/doc/artsbuilder/glossary.docbook
@@ -0,0 +1,164 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE glossary PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<glossary id="glossary">
+
+<glossentry id="gloss-alsa">
+<glossterm><acronym>ALSA</acronym></glossterm>
+<glossdef>
+<para>
+Advanced &Linux; Sound Architecture; a &Linux; sound card driver; not
+currently included with the standard kernel source code.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-arts">
+<glossterm>&arts;</glossterm>
+<glossdef>
+<para>
+Analog Real-Time Synthesizer; the name of the multimedia
+architecture/library/toolkit used by the &kde; project (note
+capitalization)
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-bsd">
+<glossterm><acronym>BSD</acronym></glossterm>
+<glossdef>
+<para>
+Berkeley Software Distribution; here refers to any of several free
+&UNIX;-compatible operating systems derived from <acronym>BSD</acronym>
+&UNIX;.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-corba">
+<glossterm><acronym>CORBA</acronym></glossterm>
+<glossdef>
+<para>
+Common Object Request Broker Architecture; a standard for implementing
+object-oriented remote execution.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-cvs">
+<glossterm><acronym>CVS</acronym></glossterm>
+<glossdef>
+<para>
+Concurrent Versions System; a software configuration management system
+used by many software projects including &kde; and &arts;.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="glos-fft">
+<glossterm><acronym>FFT</acronym></glossterm>
+<glossdef>
+<para>
+Fast Fourier Transform; an algorithm for converting data from the time
+to frequency domain; often used in signal processing.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-full-duplex">
+<glossterm>Full Duplex</glossterm>
+<glossdef>
+<para>
+The ability of a sound card to simultaneously record and play audio.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-gpl">
+<glossterm><acronym>GPL</acronym></glossterm>
+<glossdef>
+<para>
+<acronym>GNU</acronym> General Public License; a software license
+created by the Free Software Foundation defining the terms for releasing
+free software.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-gui">
+<glossterm>&GUI;</glossterm>
+<glossdef>
+<para>
+Graphical User Interface
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-idl">
+<glossterm><acronym>IDL</acronym></glossterm>
+<glossdef>
+<para>
+Interface Definition Language; a programming language independent format
+for specifying interfaces (methods and data).
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-kde">
+<glossterm>&kde;</glossterm>
+<glossdef>
+<para>
+K Desktop Environment; a project to develop a free graphical desktop
+environment for &UNIX; compatible systems.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-lgpl">
+<glossterm><acronym>LGPL</acronym></glossterm>
+<glossdef>
+<para>
+<acronym>GNU</acronym> Lesser General Public License; a software license
+created by the Free Software Foundation defining the terms for releasing
+free software; less restrictive than the <acronym>GPL</acronym> and
+often used for software libraries.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-mcop">
+<glossterm>&MCOP;</glossterm>
+<glossdef>
+<para>
+Multimedia COmmunication Protocol; the protocol used for communication
+between &arts; software modules; similar to <acronym>CORBA</acronym> but
+simpler and optimized for multimedia.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-midi">
+<glossterm>&MIDI;</glossterm>
+<glossdef>
+<para>
+Musical Instrument Digital Interface; a standard protocol for
+communication between electronic musical instruments; often also used to
+refer to a file format for storing &MIDI; commands.
+</para>
+</glossdef>
+</glossentry>
+
+<glossentry id="gloss-oss">
+<glossterm><acronym>OSS</acronym></glossterm>
+<glossdef>
+<para>
+Open Sound System; the sound drivers included with the &Linux; kernel
+(sometimes called <acronym>OSS</acronym>/Free) or a commercial version
+sold by 4Front Technologies.
+</para>
+</glossdef>
+</glossentry>
+
+</glossary>
diff --git a/doc/artsbuilder/gui.docbook b/doc/artsbuilder/gui.docbook
new file mode 100644
index 00000000..d420bf8a
--- /dev/null
+++ b/doc/artsbuilder/gui.docbook
@@ -0,0 +1,28 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<!--
+<chapter id="gui-elements">
+<title>&GUI; Elements</title>
+
+<sect1 id="gui-introduction">
+<title>Introduction</title>
+<para>
+</para>
+</sect1>
+
+<sect1 id="parents">
+<title>Parents</title>
+<para>
+</para>
+</sect1>
+
+<sect1 id="mixers">
+<title>Mixers</title>
+<para>
+</para>
+</sect1>
+</chapter>
+-->
diff --git a/doc/artsbuilder/helping.docbook b/doc/artsbuilder/helping.docbook
new file mode 100644
index 00000000..72b2ff2b
--- /dev/null
+++ b/doc/artsbuilder/helping.docbook
@@ -0,0 +1,246 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="contributing">
+<title>Contributing to &arts;</title>
+
+<sect1 id="how-to-help">
+<title>How You Can Help</title>
+
+<para>
+The &arts; project can use help from developers to make existing
+multimedia applications &arts;-aware, write new multimedia applications,
+and enhance the capabilities of &arts;. However, you don't have to be a
+developer to contribute. We can also use help from testers to submit bug
+reports, translators to translate the application text and documentation
+into other languages, artists to design bitmaps (especially for
+<application>artsbuilder</application> modules), musicians to create
+sample &arts; modules, and writers to write or proofread documentation.
+</para>
+</sect1>
+
+<sect1 id="mailing-lists">
+<title>Mailing Lists</title>
+
+<para>
+Most development discussions on &arts; take place on two mailing
+lists. This is the place to discuss new feature and implementation ideas
+or ask for help with problems.
+</para>
+
+<para>
+The &kde; Multimedia mailing list is for general &kde; multimedia issues
+including &arts; as well as the multimedia applications like &noatun;
+and &aktion;. You can subscribe from the web page at
+<ulink url="http://www.kde.org/mailinglists.html">
+http://www.kde.org/mailinglists.html</ulink> or send an email with the
+subject set to <userinput>subscribe
+<replaceable>your-email-address</replaceable></userinput> to
+<email>kde-multimedia-request@kde.org</email>. The list is also archived
+at <ulink url="http://lists.kde.org"> http://lists.kde.org</ulink>.
+</para>
+
+<para>
+The &arts; mailing list is for issues specific to &arts;, including
+non-&kde; use of &arts;. To subscribe, send an email containing the
+message body <userinput>subscribe
+<replaceable>your-email-address</replaceable></userinput> to
+<email>arts-request@space.twc.de</email>. The list is archived at
+<ulink url="http://space.twc.de/~stefan/arts-archive">
+http://space.twc.de/~stefan/arts-archive</ulink>.
+</para>
+
+</sect1>
+
+<sect1 id="coding-standards">
+<title>Coding Standards</title>
+
+<para>
+For getting a consistent reading through all the sources, it is
+important to keep the coding style the same, all over the &arts;
+source. Please, even if you just write a module, try to write/format
+your source accordingly, as it will make it easier for different people
+to maintain the source tree, and easier to copy pieces of code from one
+source to another.
+</para>
+
+<variablelist>
+<varlistentry>
+<term>Naming of member functions</term>
+<listitem>
+<para>
+&Qt;/&Java; style. That means capitalization on word breaks, and first
+letter always without capitalization; no underscores.
+</para>
+<para>This means for instance:</para>
+
+<programlisting> createStructureDesc()
+ updateWidget();
+ start(); </programlisting>
+
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>Class members</term>
+<listitem>
+<para>
+Class members are not capitalized, such as menubar or button.
+</para>
+
+<para>
+When there are accessing functions, the standard should be the &MCOP;
+way, that is, when having a long member <function>foo</function>, which
+shouldn't be visible directly, you create:
+</para>
+
+<programlisting> foo(long new_value);
+ long foo(); </programlisting>
+
+<para>
+functions to get and set the value. In that case, the real value of
+<function>foo</function> should be stored in
+<returnvalue>&lowbar;foo</returnvalue>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>Class names</term>
+<listitem>
+<para>
+All classes should be wordwise capitalized, that means
+<classname>ModuleView</classname>,
+<classname>SynthModule</classname>. All classes that belong to the
+libraries should use the &arts; namespace, like
+<classname>Arts::Soundserver</classname>.
+</para>
+<para>
+The implementations of &MCOP; classes should get called
+<classname>Class&lowbar;impl</classname>, such as
+<classname>SoundServer&lowbar;impl</classname>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>Parameters</term>
+<listitem>
+<para>
+Parameters are always uncapitalized.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>Local variables</term>
+<listitem>
+<para>
+Local variables are always uncapitalized, and may have names like
+<varname>i</varname>, <varname>p</varname>, <varname>x</varname>, &etc;
+where appropriate.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>Tab width (Shift width)</term>
+<listitem>
+<para>
+One tab is as long as 4 spaces.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>Spaces in expressions</term>
+<listitem>
+<para>
+You normally don't need to use spaces in expressions. You can however use
+them between operators and their operands. However, if you put a space before
+an operator (e.g. +), you also need to put a space after the operator. The only
+exception to this are list-like expressions (with ,), where you should only put
+a space after the ",", but not before. It's okay to omit the space here, too.
+</para>
+<para>
+The following examples demonstrate good use of spaces:
+</para>
+<programlisting>
+{
+ int a,b;
+ int c, d, e;
+ int f = 4;
+
+ a=b=c=d+e+f;
+ a = b = c = d + e + f;
+
+ if(a == 4) {
+ a = b = c = (d+e)/2;
+ }
+
+ while(b&lt;3)
+ c--;
+
+ arts_debug("%d\n", c);
+}
+</programlisting>
+<para>
+The following examples demonstrate how <emphasis>not</emphasis> to use spaces.
+For function calls, after if, while, for, switch and so on, no space is being
+written.
+</para>
+<programlisting>
+{
+ // BAD: if you write a list, write spaces only after the ","
+ int a , b , c , d , e , f;
+
+ // BAD: non-symmetric use of spaces for = operator
+ a= 5;
+
+ // BAD: if is considered a function, and isn't followed by a space
+ if (a == 5) {
+ }
+
+ // BAD: don't write a space after while
+ while (a--)
+ b++;
+
+ // BAD: functions names are not followed by a space
+ arts_debug ("%d\n", c);
+
+ // BAD: neither are member names
+ Arts::Object o = Arts::Object::null ();
+}
+</programlisting>
+</listitem>
+</varlistentry>
+
+
+<varlistentry>
+<term>Naming of source files</term>
+<listitem>
+<para>
+Source files should have no capitalization in the name. They should have
+the name of the class when they implement a single class. Their
+extension is <literal role="extension">.cc</literal> if they refer to
+&Qt;/&GUI; independent code, and <literal
+role="extension">.cpp</literal> if they refer to &Qt;/&GUI; dependant
+code. Implementation files for interfaces should be called
+<filename><replaceable>foo</replaceable>_impl</filename>, if Foo was the
+name of the interface.
+</para>
+
+<para>
+&IDL; files should be called in a descriptive way for the collection of
+interfaces they contain, also all lower case. In particular, it is not good
+to name an &IDL; file after the class itself, as the .mcopclass trader
+and type info entries would then collide.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+</sect1>
+
+</chapter>
diff --git a/doc/artsbuilder/images/Doc_MODUL.png b/doc/artsbuilder/images/Doc_MODUL.png
new file mode 100644
index 00000000..fe131e74
--- /dev/null
+++ b/doc/artsbuilder/images/Doc_MODUL.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_AUDIO_MANAGER.png b/doc/artsbuilder/images/Gui_AUDIO_MANAGER.png
new file mode 100644
index 00000000..5cc92920
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_AUDIO_MANAGER.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_INSTRUMENT_MAPPER.png b/doc/artsbuilder/images/Gui_INSTRUMENT_MAPPER.png
new file mode 100644
index 00000000..19231daa
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_INSTRUMENT_MAPPER.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_LABEL.png b/doc/artsbuilder/images/Gui_LABEL.png
new file mode 100644
index 00000000..9ba2ce80
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_LABEL.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_MIXER.png b/doc/artsbuilder/images/Gui_MIXER.png
new file mode 100644
index 00000000..f6a036a7
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_MIXER.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_PANEL.png b/doc/artsbuilder/images/Gui_PANEL.png
new file mode 100644
index 00000000..c6ce0888
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_PANEL.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_POTI.png b/doc/artsbuilder/images/Gui_POTI.png
new file mode 100644
index 00000000..b40bf431
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_POTI.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_SLIDER.png b/doc/artsbuilder/images/Gui_SLIDER.png
new file mode 100644
index 00000000..7e9c83e7
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_SLIDER.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_SUBPANEL.png b/doc/artsbuilder/images/Gui_SUBPANEL.png
new file mode 100644
index 00000000..34bc5a77
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_SUBPANEL.png
Binary files differ
diff --git a/doc/artsbuilder/images/Gui_WINDOW.png b/doc/artsbuilder/images/Gui_WINDOW.png
new file mode 100644
index 00000000..677ada48
--- /dev/null
+++ b/doc/artsbuilder/images/Gui_WINDOW.png
Binary files differ
diff --git a/doc/artsbuilder/images/Interface_MIDI_NOTE.png b/doc/artsbuilder/images/Interface_MIDI_NOTE.png
new file mode 100644
index 00000000..bc2d5ad0
--- /dev/null
+++ b/doc/artsbuilder/images/Interface_MIDI_NOTE.png
Binary files differ
diff --git a/doc/artsbuilder/images/Makefile.am b/doc/artsbuilder/images/Makefile.am
new file mode 100644
index 00000000..1b7d1e0f
--- /dev/null
+++ b/doc/artsbuilder/images/Makefile.am
@@ -0,0 +1,4 @@
+
+KDE_LANG = en
+KDE_DOCS = artsbuilder/images
+
diff --git a/doc/artsbuilder/images/Synth_ADD.png b/doc/artsbuilder/images/Synth_ADD.png
new file mode 100644
index 00000000..e06e47a0
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_ADD.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_ATAN_SATURATE.png b/doc/artsbuilder/images/Synth_ATAN_SATURATE.png
new file mode 100644
index 00000000..c8bea2a0
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_ATAN_SATURATE.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_BUS_DOWNLINK.png b/doc/artsbuilder/images/Synth_BUS_DOWNLINK.png
new file mode 100644
index 00000000..a2ad7c93
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_BUS_DOWNLINK.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_BUS_UPLINK.png b/doc/artsbuilder/images/Synth_BUS_UPLINK.png
new file mode 100644
index 00000000..3253ce69
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_BUS_UPLINK.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_CDELAY.png b/doc/artsbuilder/images/Synth_CDELAY.png
new file mode 100644
index 00000000..e9df7981
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_CDELAY.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_COMPRESSOR.png b/doc/artsbuilder/images/Synth_COMPRESSOR.png
new file mode 100644
index 00000000..2452a237
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_COMPRESSOR.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_DEBUG.png b/doc/artsbuilder/images/Synth_DEBUG.png
new file mode 100644
index 00000000..9c03a732
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_DEBUG.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_DELAY.png b/doc/artsbuilder/images/Synth_DELAY.png
new file mode 100644
index 00000000..dba8d715
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_DELAY.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_DIV.png b/doc/artsbuilder/images/Synth_DIV.png
new file mode 100644
index 00000000..5b811cdd
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_DIV.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_ENVELOPE_ADSR.png b/doc/artsbuilder/images/Synth_ENVELOPE_ADSR.png
new file mode 100644
index 00000000..31bda8ca
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_ENVELOPE_ADSR.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_FILEPLAY.png b/doc/artsbuilder/images/Synth_FILEPLAY.png
new file mode 100644
index 00000000..c68dec67
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_FILEPLAY.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_FM_SOURCE.png b/doc/artsbuilder/images/Synth_FM_SOURCE.png
new file mode 100644
index 00000000..a0da9390
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_FM_SOURCE.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_FREQUENCY.png b/doc/artsbuilder/images/Synth_FREQUENCY.png
new file mode 100644
index 00000000..4e038f69
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_FREQUENCY.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_MIDI_DEBUG.png b/doc/artsbuilder/images/Synth_MIDI_DEBUG.png
new file mode 100644
index 00000000..30c18efa
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_MIDI_DEBUG.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_MIDI_ROUTER.png b/doc/artsbuilder/images/Synth_MIDI_ROUTER.png
new file mode 100644
index 00000000..6115b6b4
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_MIDI_ROUTER.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_MUL.png b/doc/artsbuilder/images/Synth_MUL.png
new file mode 100644
index 00000000..0c98e4c8
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_MUL.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_NIL.png b/doc/artsbuilder/images/Synth_NIL.png
new file mode 100644
index 00000000..997ef1e0
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_NIL.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_PLAY.png b/doc/artsbuilder/images/Synth_PLAY.png
new file mode 100644
index 00000000..7f318b78
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_PLAY.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_PLAY_AKAI.png b/doc/artsbuilder/images/Synth_PLAY_AKAI.png
new file mode 100644
index 00000000..6e5c7988
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_PLAY_AKAI.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_PLAY_AKAIS.png b/doc/artsbuilder/images/Synth_PLAY_AKAIS.png
new file mode 100644
index 00000000..9b8a95cd
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_PLAY_AKAIS.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_PLAY_WAV.png b/doc/artsbuilder/images/Synth_PLAY_WAV.png
new file mode 100644
index 00000000..61d714ea
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_PLAY_WAV.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_PSCALE.png b/doc/artsbuilder/images/Synth_PSCALE.png
new file mode 100644
index 00000000..56e645b9
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_PSCALE.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_RC.png b/doc/artsbuilder/images/Synth_RC.png
new file mode 100644
index 00000000..b86b1dfb
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_RC.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_SEQUENCE.png b/doc/artsbuilder/images/Synth_SEQUENCE.png
new file mode 100644
index 00000000..25594c28
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_SEQUENCE.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_SEQUENCE_FREQ.png b/doc/artsbuilder/images/Synth_SEQUENCE_FREQ.png
new file mode 100644
index 00000000..07126bf7
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_SEQUENCE_FREQ.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_SHELVE_CUTOFF.png b/doc/artsbuilder/images/Synth_SHELVE_CUTOFF.png
new file mode 100644
index 00000000..9a97e853
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_SHELVE_CUTOFF.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_STD_EQUALIZER.png b/doc/artsbuilder/images/Synth_STD_EQUALIZER.png
new file mode 100644
index 00000000..7a3955f2
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_STD_EQUALIZER.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_STRUCT_KILL.png b/doc/artsbuilder/images/Synth_STRUCT_KILL.png
new file mode 100644
index 00000000..6c4d7927
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_STRUCT_KILL.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_WAVE_SIN.png b/doc/artsbuilder/images/Synth_WAVE_SIN.png
new file mode 100644
index 00000000..6d26eab6
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_WAVE_SIN.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_WAVE_SQUARE.png b/doc/artsbuilder/images/Synth_WAVE_SQUARE.png
new file mode 100644
index 00000000..ba99e85c
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_WAVE_SQUARE.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_WAVE_TRI.png b/doc/artsbuilder/images/Synth_WAVE_TRI.png
new file mode 100644
index 00000000..62083f40
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_WAVE_TRI.png
Binary files differ
diff --git a/doc/artsbuilder/images/Synth_XFADE.png b/doc/artsbuilder/images/Synth_XFADE.png
new file mode 100644
index 00000000..90498689
--- /dev/null
+++ b/doc/artsbuilder/images/Synth_XFADE.png
Binary files differ
diff --git a/doc/artsbuilder/images/schema1.png b/doc/artsbuilder/images/schema1.png
new file mode 100644
index 00000000..8bd236d0
--- /dev/null
+++ b/doc/artsbuilder/images/schema1.png
Binary files differ
diff --git a/doc/artsbuilder/images/schema2.png b/doc/artsbuilder/images/schema2.png
new file mode 100644
index 00000000..2d70af0b
--- /dev/null
+++ b/doc/artsbuilder/images/schema2.png
Binary files differ
diff --git a/doc/artsbuilder/images/schema3.png b/doc/artsbuilder/images/schema3.png
new file mode 100644
index 00000000..9e1adf41
--- /dev/null
+++ b/doc/artsbuilder/images/schema3.png
Binary files differ
diff --git a/doc/artsbuilder/images/schema4.png b/doc/artsbuilder/images/schema4.png
new file mode 100644
index 00000000..834ac2ed
--- /dev/null
+++ b/doc/artsbuilder/images/schema4.png
Binary files differ
diff --git a/doc/artsbuilder/index.docbook b/doc/artsbuilder/index.docbook
new file mode 100644
index 00000000..ba6649a1
--- /dev/null
+++ b/doc/artsbuilder/index.docbook
@@ -0,0 +1,393 @@
+<?xml version="1.0" ?>
+<!DOCTYPE book PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd" [
+ <!ENTITY kappname "&arts;" >
+ <!ENTITY tools SYSTEM "tools.docbook">
+ <!ENTITY artsbuilder-doc SYSTEM "artsbuilder.docbook">
+ <!ENTITY detail SYSTEM "detail.docbook">
+ <!ENTITY arts-midi SYSTEM "midi.docbook">
+ <!ENTITY gui SYSTEM "gui.docbook">
+ <!ENTITY mcop-ref SYSTEM "mcop.docbook">
+ <!ENTITY arts-mcop SYSTEM "mcop.docbook">
+ <!ENTITY apis SYSTEM "apis.docbook">
+ <!ENTITY modules SYSTEM "modules.docbook">
+ <!ENTITY porting SYSTEM "porting.docbook">
+ <!ENTITY helping SYSTEM "helping.docbook">
+ <!ENTITY future SYSTEM "future.docbook">
+ <!ENTITY references SYSTEM "references.docbook">
+ <!ENTITY arts-faq SYSTEM "faq.docbook">
+ <!ENTITY arts-glossary SYSTEM "glossary.docbook">
+ <!ENTITY digitalaudio SYSTEM "digitalaudio.docbook">
+ <!ENTITY midiintro SYSTEM "midiintro.docbook">
+ <!ENTITY MCOP "<acronym>MCOP</acronym>">
+ <!ENTITY DCOP "<acronym>DCOP</acronym>">
+ <!ENTITY MIDI "<acronym>MIDI</acronym>">
+ <!ENTITY mcopidl "<application>mcopidl</application>">
+ <!ENTITY IDL "<acronym>IDL</acronym>">
+ <!ENTITY % English "INCLUDE" > <!-- change language only here -->
+ <!ENTITY % addindex "IGNORE">
+]>
+
+<book lang="&language;">
+<bookinfo>
+<title>The &arts; Handbook</title>
+<authorgroup>
+
+<author>
+<firstname>Stefan</firstname>
+<surname>Westerfeld</surname>
+<affiliation>
+<address><email>stefan@space.twc.de</email></address>
+</affiliation>
+</author>
+
+<author>
+<firstname>Jeff</firstname>
+<surname>Tranter</surname>
+<affiliation>
+<address><email>tranter@kde.org</email></address>
+</affiliation>
+</author>
+
+<!-- TRANS:ROLES_OF_TRANSLATORS -->
+</authorgroup>
+
+<copyright>
+<year>1999-2001</year>
+<holder>Stefan Westerfeld &amp; Jeff Tranter</holder>
+</copyright>
+<legalnotice>&FDLNotice;</legalnotice>
+
+<date>2001-06-10</date>
+<releaseinfo>1.00.09</releaseinfo>
+
+<abstract><para>This handbook describes &arts;, the Analog Real-time
+Synthesizer.</para>
+
+</abstract>
+
+<keywordset>
+<keyword>aRts</keyword>
+<keyword>artsbuilder</keyword>
+<keyword>synthesizer</keyword>
+<keyword>multimedia</keyword>
+<keyword>structure</keyword>
+<keyword>music</keyword>
+<keyword>sound</keyword>
+<keyword>KDE</keyword>
+</keywordset>
+</bookinfo>
+
+<chapter id="introduction">
+<title>Introduction</title>
+
+<sect1 id="what-is-arts">
+<title>What is &arts;?</title>
+
+<para>The Analog Real-Time Synthesizer, or &arts;, is a modular system
+for synthesizing sound and music on a digital computer. Using small
+building blocks called modules, the user can easily build complex audio
+processing tools. Modules typically provide functions such as sound
+waveform generators, filters, audio effects, mixing, and playback of
+digital audio in different file formats.</para>
+
+<para>The &artsd; sound server mixes audio from several sources in real
+time, allowing multiple sound applications to transparently share access
+to sound hardware.</para>
+
+<para>Using &MCOP;, the Multimedia Communication Protocol, multimedia
+applications can be network transparent, authenticated for security, and
+cross-platform using interfaces defined in a language-independent way
+using &IDL;. Support is also provided for non &arts;-aware legacy
+applications. As a core component of the &kde; 2 desktop environment,
+&arts; provides the basis for the &kde; multimedia architecture, and
+will in future support more media types including video. Like &kde;,
+&arts; runs on a number of operating systems, including &Linux; and BSD
+variants. It can also be used independently of &kde;.</para>
+
+</sect1>
+
+<sect1 id="using-this-manual">
+<title>Using This Manual</title>
+
+<para>This manual is intended to provide comprehensive documentation on
+&arts; for users at different skill levels. Depending on whether you are
+a casual user of multimedia applications that make use of &arts; or a
+multimedia application developer, you may want to take different paths
+through the manual.</para>
+
+<para>It is suggested that you first read the <link
+linkend="installation">Downloading and Building &arts;</link> chapter if
+you need to get &arts; initially installed and running. If you already
+have a working system, likely bundled with your operating system
+distribution, you may choose to skip this section.</para>
+
+<para>You should then read the sections in the <link
+linkend="arts-tools">&arts; Tools</link> chapter, especially &artsd;,
+&artscontrol;, &artsshell;, and &artsdsp;. This will help you make the
+most effective use of &arts;.</para>
+
+<para>If you are interested in going further with &arts;, read the
+chapter on <link linkend="artsbuilder">&arts-builder;</link> and go
+through the tutorial. This should give you an appreciation of the
+powerful capabilities of &arts; and the provided modules that can be
+used without the need to be a programmer.</para>
+
+<para>If you want to know more about the internals of &arts;, either to
+develop multimedia applications or extend &arts; itself, read some or
+all of the chapter <link linkend="arts-in-detail">&arts; in
+Detail</link>. This should give you an understanding of all of the
+concepts that are prerequisites to &arts; software development.</para>
+
+<para>If you are interested specifically in the <acronym>MIDI</acronym>
+capabilities of &arts;, you should read the chapter on <link
+linkend="midi">&MIDI;</link>.</para>
+
+<!-- TODO
+<para>To learn more about the &arts; graphical elements, either as an advanced
+user of artsbuilder or to create new elements, read the section on <link
+linkend="gui-elements"><acronym>GUI</acronym> Elements</link>.</para>
+-->
+
+<para>If you want to develop &arts;-aware multimedia applications, the
+<link linkend="arts-apis">&arts; Application Programming
+Interfaces</link> chapter covers the different <acronym>API</acronym>s
+in detail.</para>
+
+<para>If you want to extend &arts; by creating new modules, read the
+<link linkend="arts-modules">&arts; Modules</link> chapter.</para>
+
+<para>If you are modifying an existing application to run under &arts;,
+read the chapter on <link linkend="porting">Porting Applications to
+&arts;</link>.</para>
+
+<para>You can find out how to help contribute to the &arts; project
+in the <link linkend="contributing">Contributing to &arts;</link>
+chapter, read about upcoming &arts; development in the chapter on <link
+linkend="future-work">Future Work</link>, and find links to more
+information in the <link linkend="references">References</link>
+section.</para>
+
+<para>We have also rounded out the manual with some additional material,
+including <link linkend="faq">answers to frequently asked
+questions</link>, a <link linkend="contributors">list of
+contributors</link>, the details on &arts; <link
+linkend="copyright-and-licenses">copyright and licensing</link>, and
+some background material on <link linkend="intro-digital-audio">digital
+audio</link> and <link
+linkend="midi-introduction">&MIDI;</link>. A <link
+linkend="glossary">glossary</link> of terms is also included.</para>
+
+<note>
+<para>
+This manual is still very much a work in progress. You are welcome to
+contribute by writing portions of it, but if you wish to do so, contact
+Jeff Tranter <email>tranter@kde.org</email> or Stefan Westerfeld
+<email>stefan@space.twc.de</email> first to avoid duplication of effort.
+</para>
+</note>
+
+</sect1>
+
+<sect1 id="history">
+<title>History</title>
+
+<para>
+In late 1997 Stefan Westerfeld started working on a real-time, modular
+system for sound synthesis. The code initially ran on a PowerPC system
+running &AIX;. This first implementation was quite simple but supported
+a full-featured flow system that was able to do such things as play MP3
+files and pipe audio streams through effects modules.
+</para>
+
+
+<para>The next step was to implement a &GUI; so that modules could be
+manipulated graphically. Stefan had had some good experience using
+&kde;, so that was chosen as the &GUI; toolkit, (knowing that it might
+be necessary to do a GNOME/Gtk+ version as well) and this later led to
+using &Linux; as the main development platform. Originally named
+<application>ksynth</application>, the project was renamed &arts; and
+the pace of development accelerated. The project at this stage was quite
+complete, with a <acronym>CORBA</acronym>-based protocol, dozens of
+modules, a graphical module editing tool, C and C++
+<acronym>API</acronym>s, documentation, utilities, and a mailing list
+and web site with a small group of developers. The project had come a
+long way after only a little more than a year of development.</para>
+
+<para>As the &kde; team started planning for &kde; 2.0, it became clear
+that &kde; needed a more powerful infrastructure for sound and other
+streaming media. It was decided to adapt &arts;, as it was a good step
+in this direction with a proven architecture. Much new development
+effort went into this new version of &arts;, most notably the
+replacement of the <acronym>CORBA</acronym> code with an entirely new
+subsystem, &MCOP;, optimized for multimedia. Version 0.4 of &arts; was
+included in the &kde; 2.0 release.</para>
+
+<para>Work continues on &arts;, improving performance and adding new
+functionality. It should be noted that even though &arts; is now a core
+component of &kde;, it can be used without &kde;, and is also being used
+for applications that go beyond traditional multimedia. The project has
+attracted some interest from the GNOME team, opening up the possibility
+that it may someday become the standard multimedia architecture for
+&UNIX; desktop systems.</para>
+
+</sect1>
+
+</chapter>
+
+&tools;
+&artsbuilder-doc;
+&detail;
+&arts-midi;
+&gui;
+&mcop-ref;
+&apis;
+&modules;
+&porting;
+&helping;
+&future;
+&references;
+&arts-faq;
+
+<chapter id="copyright-and-licenses">
+
+<title>&arts; Copyright and Licensing</title>
+
+<para>&arts; software copyright 1998-2001 Stefan Westerfeld
+<email>stefan@space.twc.de</email></para>
+
+<para><anchor id="contributors" />
+Documentation copyright 1999-2001
+Stefan Westerfeld <email>stefan@space.twc.de</email> and
+Jeff Tranter <email>tranter@kde.org</email>.
+</para>
+<!-- TRANS:CREDIT_FOR_TRANSLATORS -->
+
+&underFDL;
+
+<para>
+All libraries that are in &arts; are licensed under the terms of the
+<acronym>GNU</acronym> Lesser General Public license. The vast majority of the
+&arts; code is in the libraries, including the whole of <acronym>MCOP</acronym>
+and ArtsFlow. This allows the libraries to be used for non-free/non-open source
+applications if desired.
+</para>
+
+<para>There are a few programs (such as <application>artsd</application>), that
+are released under the terms of the <acronym>GNU</acronym> General Public
+License. As there have been different opinions on whether or not linking
+<acronym>GPL</acronym> programs with &Qt; is legal, I also added an explicit
+notice which allows that, in addition to the <acronym>GPL</acronym>: permission
+is also granted to link this program with the &Qt; library, treating &Qt; like a
+library that normally accompanies the operating system kernel, whether or not
+that is in fact the case.</para>
+
+</chapter>
+
+<appendix id="installation">
+<title>Installing &arts;</title>
+
+<para>
+In order to use &arts; you obviously need to have it installed and running on
+your system. There are two approaches for doing this, which are described in the
+next sections.
+</para>
+
+<sect1 id="binary-install">
+<title>Installing a Precompiled Binary Release</title>
+
+<para>
+The quickest and easiest way to get &arts; up and running is to install
+precompiled binary packages for your system. Most recent &Linux; distributions
+include &kde;, and if it is &kde; 2.0 or later it will include &arts;. If &kde;
+is not included on your installation media it may be available as a download
+from your operating system vendor. Alternatively it may be available from third
+parties. Make sure that you use packages that are compatible with your operating
+system version.
+</para>
+
+<para>
+A basic install of &kde; will include the sound server, allowing most
+applications to play sound. If you want the full set of multimedia tools and
+applications you will likely need to install additional optional packages.
+</para>
+
+<para>
+The disadvantage of using precompiled binaries is that they may not be the most
+recent version of &arts;. This is particularly likely if they are provided on
+&CD-ROM;, as the pace of development of &arts; and &kde; is such that &CD-ROM;
+media cannot usually keep pace. You may also find that, if you have one of the
+less common architectures or operating system distributions, precompiled binary
+packages may not be available and you will need to use the second method.
+</para>
+
+</sect1>
+
+<sect1 id="source-install">
+<title>Building From Source</title>
+
+<para>
+While time consuming, the most flexible way to build &arts; is to compile it
+yourself from source code. This ensures you have a version compiled optimally
+for your system configuration and allows you to build the most recent version.
+</para>
+
+<para>
+You have two choices here -- you can either install the most recent stable
+version included with &kde; or you can get the most recent (but possibly
+unstable) version directly from the &kde; project <acronym>CVS</acronym>
+repository. Most users who aren't developing for &arts; should use the stable
+version. You can download it from <ulink
+url="ftp://ftp.kde.org">ftp://ftp.kde.org</ulink> or one of the many mirror
+sites. If you are actively developing for &arts; you probably want to use the
+<acronym>CVS</acronym> version. If you want to use aRts without KDE, you can
+download a standalone development snapshot from
+<ulink url="http://space.twc.de/~stefan/kde/arts-snapshot-doc.html">
+http://space.twc.de/~stefan/kde/arts-snapshot-doc.html</ulink>.
+</para>
+
+<para>
+Note that if you are building from <acronym>CVS</acronym>, some components
+of &arts; (&ie; the basic core components including the sound server) are found
+in the <acronym>CVS</acronym> module kdelibs, while additional components (&eg;
+<application>artsbuilder</application>) are included in the kdemultimedia module. This may change in
+the future. You may also find a version in the kmusic module; this is the old
+(pre-&kde; 2.0) version which is now obsolete.
+</para>
+
+<para>
+The requirements for building &arts; are essentially the same as for building
+&kde;. The configure scripts should detect your system configuration and
+indicate if any required components are missing. Make sure that you have a
+working sound driver on your system (either the <acronym>OSS</acronym>/Free
+driver in the kernel, <acronym>OSS</acronym> driver from 4Front
+Technologies, or
+<acronym>ALSA</acronym> driver with <acronym>OSS</acronym> emulation).
+</para>
+
+<para>More information on downloading and installing &kde; (including &arts;)
+can be found in the <ulink
+url="http://www.kde.org/documentation/faq/index.html">&kde;
+&FAQ;</ulink>.</para>
+
+</sect1>
+
+</appendix>
+
+&digitalaudio;
+&midiintro;
+&arts-glossary;
+
+</book>
+<!--
+Local Variables:
+mode: sgml
+sgml-omittag:nil
+sgml-shorttag:t
+sgml-namecase-general:t
+sgml-general-insert-case:lower
+sgml-minimize-attributes:nil
+sgml-always-quote-attributes:t
+sgml-indent-step:0
+sgml-indent-data:nil
+End:
+-->
diff --git a/doc/artsbuilder/mcop.docbook b/doc/artsbuilder/mcop.docbook
new file mode 100644
index 00000000..86aa03b5
--- /dev/null
+++ b/doc/artsbuilder/mcop.docbook
@@ -0,0 +1,2274 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="mcop">
+<title>&MCOP;: Object Model and Streaming</title>
+
+<sect1 id="mcop-overview">
+
+<title>Overview</title>
+
+<para>
+&MCOP; is the standard &arts; uses for:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Communication between objects.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Network transparency.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Describing object interfaces.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Language independence.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+One major aspect of &MCOP; is the <emphasis>interface description
+language</emphasis>, &IDL;, in which many of the &arts; interfaces and
+<acronym>API</acronym>s are defined in a language independent way.
+</para>
+
+<para>
+To use &IDL; interfaces from C++, they are compiled by the &IDL;
+compiler into C++ code. When you implement an interface, you derive from
+the skeleton class the &IDL; compiler has generated. When you use an
+interface, you do so using a wrapper. This way, &MCOP; can use a
+protocol if the object you are talking to is not local - you get network
+transparency.
+</para>
+
+<para>
+This chapter is supposed to describe the basic features of the object
+model that results from the use of &MCOP;, the protocol, how to use
+&MCOP; in C++ (language binding), and so on.
+</para>
+
+</sect1>
+
+<sect1 id="interfaces">
+
+<title>Interfaces and &IDL;</title>
+
+<para>
+Many of the services provided by &arts;, such as modules and the sound
+server, are defined in terms of <acronym>interfaces</acronym>.
+Interfaces are specified in a programming language independent format:
+&IDL;.
+</para>
+
+<para>
+This allows many of the implementation details such as the format of
+multimedia data streams, network transparency, and programming language
+dependencies, to be hidden from the specification for the interface. A
+tool, &mcopidl;, translates the interface
+definition into a specific programming language (currently only C++ is
+supported).
+</para>
+
+<para>
+The tool generates a skeleton class with all of the boilerplate code and
+base functionality. You derive from that class to implement the features
+you want.
+</para>
+
+<para>
+The &IDL; used by &arts; is similar to that used by
+<acronym>CORBA</acronym> and <acronym>DCOM</acronym>.
+</para>
+
+<para>
+&IDL; files can contain:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+C-style #include directives for other &IDL; files.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Definitions of enumerated and struct types, as in C/C++.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Definitions of interfaces.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+In &IDL;, interfaces are defined much like a C++ class or C struct,
+albeit with some restrictions. Like C++, interfaces can subclass other
+interfaces using inheritance. Interface definitions can include three
+things: streams, attributes, and methods.
+</para>
+
+<sect2 id="streams">
+
+<title>Streams</title>
+
+<para>
+Streams define multimedia data, one of the most important components of
+a module. Streams are defined in the following format:
+</para>
+
+<para>
+[ async ] in|out [ multi ] <replaceable>type</replaceable> stream <replaceable>name</replaceable> [ , <replaceable>name</replaceable> ] ;
+</para>
+
+<para>
+Streams have a defined direction in reference to the module, as
+indicated by the required qualifiers in or out. The type argument
+defines the type of data, which can be any of the types described later
+for attributes (not all are currently supported). Many modules use the
+stream type audio, which is an alias for float since that is the
+internal data format used for audio streams. Multiple streams of the same
+type can be defined in the same definition using comma-separated names.
+</para>
+
+<para>
+Streams are by default synchronous, which means they are continuous
+flows of data at a constant rate, such as <acronym>PCM</acronym>
+audio. The async qualifier specifies an asynchronous stream, which is
+used for non-continuous data flows. The most common example of an async
+stream is &MIDI; messages.
+</para>
+
+<para>
+The multi keyword, only valid for input streams, indicates that the
+interface supports a variable number of inputs. This is useful for
+implementing devices such as mixers that can accept any number of input
+streams.
+</para>
+
+</sect2>
+<sect2 id="attributes">
+
+<title>Attributes</title>
+
+<para>
+Attributes are data associated with an instance of an interface. They
+are declared like member variables in C++, and can use any of the
+primitive types boolean, byte, long, string, or float. You can also use
+user-defined struct or enum types as well as variable sized sequences
+using the syntax sequence&lt;type&gt;. Attributes can optionally be
+marked readonly.
+</para>
+
+</sect2>
+<sect2 id="methods">
+
+<title>Methods</title>
+
+<para>
+As in C++, methods can be defined in interfaces. The method parameters
+are restricted to the same types as attributes. The keyword oneway
+indicates a method which returns immediately and is executed
+asynchronously.
+</para>
+
+</sect2>
+
+<sect2 id="standardinterfaces">
+
+<title>Standard Interfaces</title>
+
+<para>
+Several standard module interfaces are already defined for you in
+&arts;, such as <interfacename>StereoEffect</interfacename>, and
+<interfacename>SimpleSoundServer</interfacename>.
+</para>
+
+</sect2>
+
+<sect2 id="example">
+<title>Example</title>
+
+<para>
+A simple example of a module taken from &arts; is the constant delay
+module, found in the file
+<filename>kdemultimedia/arts/modules/artsmodules.idl</filename>. The
+interface definition is listed below.
+</para>
+
+<programlisting>
+interface Synth_CDELAY : SynthModule {
+ attribute float time;
+ in audio stream invalue;
+ out audio stream outvalue;
+};
+</programlisting>
+
+<para>
+This module inherits from
+<interfacename>SynthModule</interfacename>. That interface, defined in
+<filename>artsflow.idl</filename>, defines the standard methods
+implemented in all music synthesizer modules.
+</para>
+
+<para>
+The CDELAY effect delays a stereo audio stream by the time value
+specified as a floating point parameter. The interface definition has an
+attribute of type float to store the delay value. It defines two input
+audio streams and two output audio streams (typical of stereo
+effects). No methods are required other than those it inherits.
+</para>
+
+</sect2>
+
+</sect1>
+
+<sect1 id="more-about-streams">
+<title>More About Streams</title>
+
+<para>
+This section covers some additional topics related to streams.
+</para>
+
+<sect2 id="stream-types">
+<title>Stream Types</title>
+
+<para>
+There are various requirements for how a module can do streaming. To
+illustrate this, consider these examples:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Scaling a signal by a factor of two.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Performing sample frequency conversion.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Decompressing a run-length encoded signal.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Reading &MIDI; events from <filename
+class="devicefile">/dev/midi00</filename> and inserting them into a
+stream.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+The first case is the simplest: upon receiving 200 samples of input the
+module produces 200 samples of output. It only produces output when it
+gets input.
+</para>
+
+<para>
+The second case produces different numbers of output samples when given
+200 input samples. It depends what conversion is performed, but the
+number is known in advance.
+</para>
+
+<para>
+The third case is even worse. From the outset you cannot even guess how
+much data 200 input bytes will generate (probably a lot more than 200
+bytes, but...).
+</para>
+
+<para>
+The last case is a module which becomes active by itself, and sometimes
+produces data.
+</para>
+
+<para>
+In &arts; 0.3.4, only streams of the first type were handled, and most
+things worked nicely. This is probably what you need most when writing
+modules that process audio. The problem with the other, more complex
+types of streaming, is that they are hard to program, and that you don't
+need the features most of the time. That is why we do this with two
+different stream types: synchronous and asynchronous.
+</para>
+
+<para>
+Synchronous streams have these characteristics:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Modules must be able to calculate data of any length, given enough
+input.
+</para>
+</listitem>
+
+<listitem>
+<para>
+All streams have the same sampling rate.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The <function>calculateBlock()</function> function will be called when
+enough data is available, and the module can rely on the pointers
+pointing to data.
+</para>
+</listitem>
+
+<listitem>
+<para>
+There is no allocation and deallocation to be done.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+Asynchronous streams, on the other hand, have this behavior:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Modules may produce data sometimes, or with varying sampling rate, or
+only if they have input from some file descriptor. They are not bound by
+the rule <quote>must be able to satisfy requests of any size</quote>.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Asynchronous streams of a module may have entirely different sampling
+rates.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Outgoing streams: there are explicit functions to allocate packets, to
+send packets - and an optional polling mechanism that will tell you when
+you should create some more data.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Incoming streams: you get a call when you receive a new packet - you
+have to say when you are through with processing all data of that
+packet, which must not happen at once (you can say that anytime later,
+and if everybody has processed a packet, it will be freed/reused).
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+When you declare streams, you use the keyword <quote>async</quote> to
+indicate you want to make an asynchronous stream. So, for instance,
+assume you want to convert an asynchronous stream of bytes into a
+synchronous stream of samples. Your interface could look like this:
+</para>
+
+<programlisting>
+interface ByteStreamToAudio : SynthModule {
+ async in byte stream indata; // the asynchronous input sample stream
+
+ out audio stream left,right; // the synchronous output sample streams
+};
+</programlisting>
+
+</sect2>
+
+<sect2 id="async-streams">
+<title>Using Asynchronous Streams</title>
+
+<para>
+Suppose you decided to write a module to produce sound
+asynchronously. Its interface could look like this:
+</para>
+
+<programlisting>
+interface SomeModule : SynthModule
+{
+ async out byte stream outdata;
+};
+</programlisting>
+
+<para>
+How do you send the data? The first method is called <quote>push
+delivery</quote>. With asynchronous streams you send the data as
+packets. That means you send individual packets with bytes as in the
+above example. The actual process is: allocate a packet, fill it, send
+it.
+</para>
+
+<para>
+Here it is in terms of code. First we allocate a packet:
+</para>
+
+<programlisting>
+DataPacket&lt;mcopbyte&gt; *packet = outdata.allocPacket(100);
+</programlisting>
+
+<para>
+Then we fill it:
+</para>
+
+<programlisting>
+// cast so that fgets is happy that it has a (char *) pointer
+char *data = (char *)packet-&gt;contents;
+
+// as you can see, you can shrink the packet size after allocation
+// if you like
+if(fgets(data,100,stdin))
+ packet-&gt;size = strlen(data);
+else
+ packet-&gt;size = 0;
+</programlisting>
+
+<para>
+Now we send it:
+</para>
+
+<programlisting>
+packet-&gt;send();
+</programlisting>
+
+<para>
+This is quite simple, but if we want to send packets exactly as fast as
+the receiver can process them, we need another approach, the <quote>pull
+delivery</quote> method. You ask to send packets as fast as the receiver
+is ready to process them. You start with a certain amount of packets you
+send. As the receiver processes one packet after another, you start
+refilling them with fresh data, and send them again.
+</para>
+
+<para>
+You start that by calling setPull. For example:
+</para>
+
+<programlisting>
+outdata.setPull(8, 1024);
+</programlisting>
+
+<para>
+This means that you want to send packets over outdata. You want to start
+sending 8 packets at once, and as the receiver processes some of them,
+you want to refill them.
+</para>
+
+<para>
+Then, you need to implement a method which fills the packets, which could
+look like this:
+</para>
+
+<programlisting>
+void request_outdata(DataPacket&lt;mcopbyte&gt; *packet)
+{
+ packet-&gt;size = 1024; // shouldn't be more than 1024
+ for(int i = 0;i &lt; 1024; i++)
+ packet-&gt;contents[i] = (mcopbyte)'A';
+ packet-&gt;send();
+}
+</programlisting>
+
+<para>
+That's it. When you don't have any more data, you can start sending
+packets with zero size, which will stop the pulling.
+</para>
+
+<para>
+Note that it is essential to give the method the exact name
+<methodname>request_<replaceable>streamname</replaceable></methodname>.
+</para>
+
+<para>
+We just discussed sending data. Receiving data is much much
+simpler. Suppose you have a simple ToLower filter, which simply converts
+all letters to lowercase:
+</para>
+
+<programlisting>
+interface ToLower {
+ async in byte stream indata;
+ async out byte stream outdata;
+};
+</programlisting>
+
+<para>
+This is really simple to implement; here is the whole implementation:
+</para>
+
+<programlisting>
+class ToLower_impl : public ToLower_skel {
+public:
+ void process_indata(DataPacket&lt;mcopbyte&gt; *inpacket)
+ {
+ DataPacket&lt;mcopbyte&gt; *outpacket = outdata.allocPacket(inpacket-&gt;size);
+
+ // convert to lowercase letters
+ char *instring = (char *)inpacket-&gt;contents;
+ char *outstring = (char *)outpacket-&gt;contents;
+
+ for(int i=0;i&lt;inpacket-&gt;size;i++)
+ outstring[i] = tolower(instring[i]);
+
+ inpacket-&gt;processed();
+ outpacket-&gt;send();
+ }
+};
+
+REGISTER_IMPLEMENTATION(ToLower_impl);
+</programlisting>
+
+<para>
+Again, it is essential to name the method
+<methodname>process_<replaceable>streamname</replaceable></methodname>.
+</para>
+
+<para>
+As you see, for each arriving packet you get a call for a function (the
+<function>process_indata</function> call in our case). You need to call
+the <methodname>processed()</methodname> method of a packet to indicate
+you have processed it.
+</para>
+
+<para>
+Here is an implementation tip: if processing takes longer (&ie; if you
+need to wait for soundcard output or something like that), don't call
+processed immediately, but store the whole data packet and call
+processed only as soon as you really processed that packet. That way,
+senders have a chance to know how long it really takes to do your work.
+</para>
+
+<para>
+As synchronization isn't so nice with asynchronous streams, you should
+use synchronous streams wherever possible, and asynchronous streams only
+when necessary.
+</para>
+
+</sect2>
+
+<sect2 id="default-streams">
+<title>Default Streams</title>
+
+<para>
+Suppose you have 2 objects, for example an AudioProducer and an
+AudioConsumer. The AudioProducer has an output stream and AudioConsumer
+has an input one. Each time you want to connect them, you will use those
+2 streams. The first use of defaulting is to enable you to make the
+connection without specifying the ports in that case.
+</para>
+
+<para>
+Now suppose the two objects above can handle stereo, and each has a
+<quote>left</quote> and <quote>right</quote> port. You'd still like to
+connect them as easily as before. But how can the connecting system
+know which output port to connect to which input port? It has no way to
+correctly map the streams. Defaulting is then used to specify several
+streams, with an order. Thus, when you connect an object with 2 default
+output streams to another one with 2 default input streams, you don't
+have to specify the ports, and the mapping will be done correctly.
+</para>
+
+<para>
+Of course, this is not limited to stereo. Any number of streams can be
+made default if needed, and the connect function will check that the
+number of defaults for the 2 objects match (in the required direction) if you
+don't specify the ports to use.
+</para>
+
+<para>
+The syntax is as follows: in the &IDL;, you can use the default keyword
+in the stream declaration, or on a single line. For example:
+</para>
+
+<programlisting>
+interface TwoToOneMixer {
+ default in audio stream input1, input2;
+ out audio stream output;
+};
+</programlisting>
+
+<para>
+In this example, the object will expect its two input ports to be
+connected by default. The order is the one specified on the default
+line, so an object like this one:
+</para>
+
+<programlisting>
+interface DualNoiseGenerator {
+ out audio stream bzzt, couic;
+ default couic, bzzt;
+};
+</programlisting>
+
+<para>
+Will make connections from <quote>couic</quote> to
+<quote>input1</quote>, and <quote>bzzt</quote> to <quote>input2</quote>
+automatically. Note that since there is only one output for the mixer,
+it will be made default in this case (see below). The syntax used in the
+noise generator is useful to declare a different order than the
+declaration, or selecting only a few ports as default. The directions of
+the ports on this line will be looked up by &mcopidl;, so don't specify
+them. You can even mix input and output ports in such a line, only the
+order is important.
+</para>
+
+<para>
+There are some rules that are followed when using inheritance:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+If a default list is specified in the &IDL;, then use
+it. Parent ports can be put in this list as well, whether they were
+default in the parent or not.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Otherwise, inherit parent's defaults. Ordering is parent1 default1,
+parent1 default2..., parent2 default1... If there is a common ancestor
+using 2 parent branches, a <quote>virtual public</quote>-like merging is
+done at that default's first occurrence in the list.
+</para>
+</listitem>
+
+<listitem>
+<para>
+If there is still no default and a single stream in a
+direction, use it as default for that direction.
+</para>
+</listitem>
+</itemizedlist>
+
+</sect2>
+
+</sect1>
+<sect1 id="attribute-change-notify">
+<title>Attribute change notifications</title>
+
+<!-- TODO: This should be embedded better into the context - I mean: the
+ context should be written ;-). -->
+
+<para>
+Attribute change notifications are a way to know when an attribute
+changed. They are a bit comparable with &Qt;'s or Gtk's signals and
+slots. For instance, if you have a &GUI; element, a slider, which
+configures a number between 0 and 100, you will usually have an object
+that does something with that number (for instance, it might be
+controlling the volume of some audio signal). So you would like that
+whenever the slider is moved, the object which scales the volume gets
+notified. A connection between a sender and a receiver.
+</para>
+
+<para>
+&MCOP; deals with that by being able to provide notifications when
+attributes change. Whatever is declared as <quote>attribute</quote> in
+the &IDL;, can emit such change notifications, and should do so,
+whenever it is modified. Whatever is declared as
+<quote>attribute</quote> can also receive such change notifications. So
+for instance if you had two &IDL; interfaces, like these:
+</para>
+
+<programlisting>
+ interface Slider {
+ attribute long min,max;
+ attribute long position;
+ };
+ interface VolumeControl : Arts::StereoEffect {
+ attribute long volume; // 0..100
+ };
+</programlisting>
+
+<para>
+You can connect them using change notifications. It works using the
+normal flowsystem connect operation. In this case, the C++ code to
+connect two objects would look like this:
+</para>
+
+<programlisting>
+#include &lt;connect.h&gt;
+using namespace Arts;
+[...]
+connect(slider,"position_changed",volumeControl,"volume");
+</programlisting>
+
+<para>
+As you see, each attribute offers two different streams, one for sending
+the change notifications, called
+<function><replaceable>attributename</replaceable>_changed</function>,
+
+<!-- TODO - how do I markup code that is an example - you wouldn't write
+ attributename in the source, but the name of some attribute
+
+ LW: I'm guessing
+ here, because I know how to markup QT code, but your stuff is different.
+ Hopefully this will give you inspiration, and we can work out later the fine
+ tuning if I have it wrong. The line above in the code sample, if it were qt
+ stuff, I would mark up this way (linebreaks for clarity of markup only, yes I
+ know it's incorrect!):
+
+ <function>connect(<classname>slider</classname>,
+ <function><replaceable>position</replaceable>_changed</function>,
+ <classname>volumeControl</classname>,
+ <function>volume</function>);</function>
+
+ You can use <function><replaceable>attributename</function> and even
+ <function><replaceable>attributename</replaceable>_changed</function>.
+
+ If I have the above totally wrong (which is entirely possible!) Some other
+ elements you might find handy:
+
+ <varname>, <type>, <returnvalue>, <constant>, <methodname>
+ There's also a markup guide at http://madmax.atconnex.net/kde/ that might
+ help, although unfortunately the programming section is still incomplete. -->
+
+ and one for receiving change notifications, called
+<function><replaceable>attributename</replaceable></function>.
+</para>
+
+<para>
+It is important to know that change notifications and asynchronous
+streams are compatible. They are also network transparent. So you can
+connect a change notification of a float attribute of a &GUI; widget
+to an asynchronous stream of a synthesis module running on another
+computer. This of course also implies that change notifications are
+<emphasis>not synchronous</emphasis>, this means, that after you have
+sent the change notification, it may take some time until it really gets
+received.
+</para>
+
+<sect2 id="sending-change-notifications">
+
+<title>Sending change notifications</title>
+
+<para>
+When implementing objects that have attributes, you need to send change
+notifications wherever an attribute changes. The code for doing this
+looks like this:
+</para>
+
+<programlisting>
+ void KPoti_impl::value(float newValue)
+ {
+ if(newValue != _value)
+ {
+ _value = newValue;
+ value_changed(newValue); // &lt;- send change notification
+ }
+ }
+</programlisting>
+
+<para>
+It is strongly recommended to use code like this for all objects you
+implement, so that change notifications can be used by other people. You
+should however avoid sending notifications too often, so if you are doing
+signal processing, it is probably the best if you keep track when you
+sent your last notification, so that you don't send one with every
+sample you process.
+</para>
+
+</sect2>
+
+<sect2 id="change-notifications-apps">
+<title>Applications for change notifications</title>
+
+<para>
+It will be especially useful to use change notifications in conjunction
+with scopes (things that visualize audio data for instance), gui
+elements, control widgets, and monitoring. Code using this is in
+<filename class="directory">kdelibs/arts/tests</filename>, and in the
+experimental artsgui implementation, which you can find under <filename
+class="directory">kdemultimedia/arts/gui</filename>.
+</para>
+
+<!-- TODO: can I markup links into the source code - if yes, how? -->
+
+<!-- LW: Linking into the source is problematic - we can't assume people are
+reading this on a machine with the sources available, or that they aren't
+reading it from a website. We're working on it! -->
+
+</sect2>
+</sect1>
+
+<sect1 id="the-mcoprc-file">
+
+<title>The <literal role="extension">.mcoprc</literal> file</title>
+
+<para>
+The <literal role="extension">.mcoprc</literal> file (in each user's
+home folder) can be used to configure &MCOP; in some ways. Currently,
+the following is possible:
+</para>
+
+<variablelist>
+
+<varlistentry>
+<term>GlobalComm</term>
+<listitem>
+<para>
+The name of an interface to be used for global communication. Global
+communication is used to find other objects and obtain the secret
+cookie. Multiple &MCOP; clients/servers that should be able to talk to
+each other need to have a GlobalComm object which is able to share
+information between them. Currently, the possible values are
+<quote>Arts::TmpGlobalComm</quote> to communicate via <filename
+class="directory">/tmp/mcop-<replaceable>username</replaceable></filename>
+folder (which will only work on the local computer) and
+<quote>Arts::X11GlobalComm</quote> to communicate via the root window
+properties on the X11 server.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>TraderPath</term>
+
+<listitem>
+<para>
+Specifies where to look for trader information. You can list more than
+one folder here, and separate them with commas, like
+</para>
+</listitem>
+
+</varlistentry>
+
+<varlistentry>
+<term>ExtensionPath</term>
+
+<listitem>
+<para>
+Specifies from which folders extensions (in the form of shared
+libraries) are loaded. Multiple values can be specified comma separated.
+</para>
+</listitem>
+
+</varlistentry>
+</variablelist>
+
+<para>
+An example which uses all of the above is:
+</para>
+
+<programlisting>
+# $HOME/.mcoprc file
+GlobalComm=Arts::X11GlobalComm
+
+# if you are a developer, it might be handy to add a folder in your home
+# to the trader/extension path to be able to add components without
+# installing them
+TraderPath="/opt/kde2/lib/mcop","/home/joe/mcopdevel/mcop"
+ExtensionPath="/opt/kde2/lib","/home/joe/mcopdevel/lib"
+</programlisting>
+
+</sect1>
+
+<sect1 id="mcop-for-corba-users">
+<title>&MCOP; for <acronym>CORBA</acronym> Users</title>
+
+<para>
+If you have used <acronym>CORBA</acronym> before, you will see that
+&MCOP; is much the same thing. In fact, &arts; prior to version 0.4 used
+<acronym>CORBA</acronym>.
+</para>
+
+<para>
+The basic idea of <acronym>CORBA</acronym> is the same: you implement
+objects (components). By using the &MCOP; features, your objects are not
+only available as normal classes from the same process (via standard C++
+techniques) - they also are available to remote servers
+transparently. For this to work, the first thing you need to do is to
+specify the interface of your objects in an &IDL; file - just like
+<acronym>CORBA</acronym> &IDL;. There are only a few differences.
+</para>
+
+<sect2 id="corba-missing">
+<title><acronym>CORBA</acronym> Features That Are Missing In
+&MCOP;</title>
+
+<para>
+In &MCOP; there are no <quote>in</quote> and <quote>out</quote>
+parameters on method invocations. Parameters are always incoming, the
+return code is always outgoing, which means that the interface:
+</para>
+
+<programlisting>
+// CORBA idl
+interface Account {
+ void deposit( in long amount );
+ void withdraw( in long amount );
+ long balance();
+};
+</programlisting>
+
+<para>
+is written as
+</para>
+
+<programlisting>
+// MCOP idl
+interface Account {
+ void deposit( long amount );
+ void withdraw( long amount );
+ long balance();
+};
+</programlisting>
+
+<para>
+in &MCOP;.
+</para>
+
+<para>
+There is no exception support. &MCOP; doesn't have exceptions - it uses
+something else for error handling.
+</para>
+
+<para>
+There are no union types and no typedefs. I don't know if that is a real
+weakness, something one would desperately need to survive.
+</para>
+
+<para>
+There is no support for passing interfaces or object references.
+</para>
+
+</sect2>
+
+<sect2 id="corba-different">
+<title><acronym>CORBA</acronym> Features That Are Different In
+&MCOP;</title>
+
+<para>
+You declare sequences as
+<quote>sequence&lt;<replaceable>type</replaceable>&gt;</quote> in &MCOP;. There
+is no need for a typedef. For example, instead of:
+</para>
+
+<programlisting>
+// CORBA idl
+struct Line {
+ long x1,y1,x2,y2;
+};
+typedef sequence&lt;Line&gt; LineSeq;
+interface Plotter {
+ void draw(in LineSeq lines);
+};
+</programlisting>
+
+<para>
+you would write
+</para>
+
+<programlisting>
+// MCOP idl
+struct Line {
+ long x1,y1,x2,y2;
+};
+interface Plotter {
+ void draw(sequence&lt;Line&gt; lines);
+};
+</programlisting>
+
+</sect2>
+
+<sect2 id="no-in-corba">
+<title>&MCOP; Features That Are Not In <acronym>CORBA</acronym></title>
+
+<para>
+You can declare streams, which will then be evaluated by the &arts;
+framework. Streams are declared in a similar manner to attributes. For
+example:
+</para>
+
+<programlisting>
+// MCOP idl
+interface Synth_ADD : SynthModule {
+ in audio stream signal1,signal2;
+ out audio stream outvalue;
+};
+</programlisting>
+
+<para>
+This says that your object will accept two incoming synchronous audio
+streams called signal1 and signal2. Synchronous means that these are
+streams that deliver x samples per second (or other time), so that the
+scheduler will guarantee to always provide you a balanced amount of
+input data (&eg; 200 samples of signal1 are there and 200 samples
+signal2 are there). You guarantee that if your object is called with
+those 200 samples signal1 + signal2, it is able to produce exactly 200
+samples to outvalue.
+</para>
+
+</sect2>
+
+<sect2 id="mcop-binding">
+<title>The &MCOP; C++ Language Binding</title>
+
+<para>
+This differs from <acronym>CORBA</acronym> mostly:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Strings use the C++ <acronym>STL</acronym> <classname>string</classname>
+class. When stored in sequences, they are stored <quote>plain</quote>,
+that means they are considered to be a primitive type. Thus, they need
+copying.
+</para>
+</listitem>
+
+<listitem>
+<para>
+longs are plain long's (expected to be 32 bit).
+</para>
+</listitem>
+
+<listitem>
+<para>
+Sequences use the C++ <acronym>STL</acronym>
+<classname>vector</classname> class.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Structures are all derived from the &MCOP; class
+<classname>Type</classname>, and generated by the &MCOP; &IDL;
+compiler. When stored in sequences, they are not stored
+<quote>plain</quote>, but as pointers, as otherwise too much copying
+would occur.
+</para>
+</listitem>
+</itemizedlist>
+</sect2>
+
+<sect2 id="implementing-objects">
+<title>Implementing &MCOP; Objects</title>
+
+<para>
+After having them passed through the &IDL; compiler, you need to derive
+from the <classname>_skel</classname> class. For instance, consider you
+have defined your interface like this:
+</para>
+
+<programlisting>
+// MCOP idl: hello.idl
+interface Hello {
+ void hello(string s);
+ string concat(string s1, string s2);
+ long sum2(long a, long b);
+};
+</programlisting>
+
+<para>
+You pass that through the &IDL; compiler by calling
+<userinput><command>mcopidl</command>
+<parameter>hello.idl</parameter></userinput>, which will in turn generate
+<filename>hello.cc</filename> and <filename>hello.h</filename>. To
+implement it, you need to define a C++-class that inherits the skeleton:
+</para>
+
+<programlisting>
+// C++ header file - include hello.h somewhere
+class Hello_impl : virtual public Hello_skel {
+public:
+ void hello(const string&amp; s);
+ string concat(const string&amp; s1, const string&amp; s2);
+ long sum2(long a, long b);
+};
+</programlisting>
+
+<para>
+Finally, you need to implement the methods as normal C++
+</para>
+
+<programlisting>
+// C++ implementation file
+
+// as you see string's are passed as const string references
+void Hello_impl::hello(const string&amp; s)
+{
+ printf("Hello '%s'!\n",s.c_str());
+}
+
+// when they are a returncode they are passed as "normal" strings
+string Hello_impl::concat(const string&amp; s1, const string&amp; s2)
+{
+ return s1+s2;
+}
+
+long Hello_impl::sum2(long a, long b)
+{
+ return a+b;
+}
+</programlisting>
+
+<para>
+Once you do that, you have an object which can communicate using &MCOP;.
+Just create one (using the normal C++ facilities to create an object):
+</para>
+
+<programlisting>
+ Hello_impl server;
+</programlisting>
+
+<para>
+And as soon as you give somebody the reference
+</para>
+
+<programlisting>
+ string reference = server._toString();
+ printf("%s\n",reference.c_str());
+</programlisting>
+
+<para>
+and go to the &MCOP; idle loop
+</para>
+
+<programlisting>
+Dispatcher::the()-&gt;run();
+</programlisting>
+
+<para>
+People can access the thing using
+</para>
+
+<programlisting>
+// this code can run anywhere - not necessarily in the same process
+// (it may also run on a different computer/architecture)
+
+ Hello *h = Hello::_fromString([the object reference printed above]);
+</programlisting>
+
+<para>
+and invoke methods:
+</para>
+
+<programlisting>
+ if(h)
+ h-&gt;hello("test");
+ else
+ printf("Access failed?\n");
+</programlisting>
+
+</sect2>
+</sect1>
+
+<sect1 id="mcop-security">
+<title>&MCOP; Security Considerations</title>
+
+<para>
+Since &MCOP; servers will listen on a <acronym>TCP</acronym> port,
+potentially everybody (if you are on the Internet) may try to connect
+&MCOP; services. Thus, it is important to authenticate clients. &MCOP;
+uses the md5-auth protocol.
+</para>
+
+<para>
+The md5-auth protocol does the following to ensure that only selected
+(trusted) clients may connect to a server:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+It assumes you can give every client a secret cookie.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Every time a client connects, it verifies that this client knows that
+secret cookie, without actually transferring it (not even in a form that
+somebody listening to the network traffic could find it out).
+</para>
+</listitem>
+
+</itemizedlist>
+
+<para>
+To give each client the secret cookie, &MCOP; will (normally) put it in
+the <filename class="directory">mcop</filename> folder (under
+<filename
+class="directory">/tmp/mcop-<envar>USER</envar>/secret-cookie</filename>). Of
+course, you can copy it to other computers. However, if you do so, use a
+secure transfer mechanism, such as <command>scp</command> (from
+<application>ssh</application>).
+</para>
+
+<para>
+The authentication of clients uses the following steps:
+</para>
+
+<procedure>
+<step>
+<para>
+[SERVER] generate a new (random) cookie R
+</para>
+</step>
+
+<step>
+<para>
+[SERVER] send it to the client
+</para>
+</step>
+
+<step>
+<para>
+[CLIENT] read the "secret cookie" S from a file
+</para>
+</step>
+
+<step>
+<para>
+[CLIENT] mangle the cookies R and S to a mangled cookie M using the MD5
+algorithm
+</para>
+</step>
+
+<step>
+<para>
+[CLIENT] send M to the server
+</para>
+</step>
+
+<step>
+<para>
+[SERVER] verify that mangling R and S gives just the
+same thing as the cookie M received from the client. If yes,
+authentication is successful.
+</para>
+</step>
+
+</procedure>
+
+<para>
+This algorithm should be secure, given that
+</para>
+
+<orderedlist>
+<listitem>
+<para>
+The secret cookies and random cookies are <quote>random enough</quote>
+ and
+</para>
+</listitem>
+
+<listitem>
+<para>
+The MD5 hashing algorithm doesn't allow to find out the
+<quote>original text</quote>, that is the secret cookie S and the random
+cookie R (which is known, anyway), from the mangled cookie M.
+</para>
+</listitem>
+</orderedlist>
+
+<para>
+The &MCOP; protocol will start every new connection with an
+authentication process. Basically, it looks like this:
+</para>
+
+<procedure>
+
+<step>
+<para>
+Server sends a ServerHello message, which describes
+the known authentication protocols.
+</para>
+</step>
+
+<step>
+<para>
+Client sends a ClientHello message, which includes authentication info.
+</para>
+</step>
+
+<step>
+<para>
+Server sends an AuthAccept message.
+</para>
+</step>
+</procedure>
+
+<para>
+To see that the security actually works, we should look at how messages
+are processed on unauthenticated connections:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Before the authentication succeeds, the server will not receive other
+messages from the connection. Instead, if the server for instance
+expects a <quote>ClientHello</quote> message, and gets an mcopInvocation
+message, it will drop the connection.
+</para>
+</listitem>
+
+<listitem>
+<para>
+If the client doesn't send a valid &MCOP; message at all (no &MCOP;
+magic in the message header) in the authentication phase, but something
+else, the connection is dropped.
+</para>
+</listitem>
+
+<listitem>
+<para>
+If the client tries to send a very large message (&gt; 4096 bytes)
+in the authentication phase, the message size is truncated to 0 bytes,
+which will cause it not to be accepted for authentication. This is to
+prevent unauthenticated clients from sending &eg; 100 megabytes of
+message, which would be received and could cause the server to run out
+of memory.
+</para>
+</listitem>
+
+<listitem>
+<para>
+If the client sends a corrupt ClientHello message (one, for which
+demarshalling fails), the connection is dropped.
+</para>
+</listitem>
+
+<listitem>
+<para>
+If the client sends nothing at all, then a timeout should occur (to be
+implemented).
+</para>
+</listitem>
+</itemizedlist>
+
+</sect1>
+
+<sect1 id="mcop-protocol">
+<title>&MCOP; Protocol Specification</title>
+
+<sect2 id="mcop-protocol-intro">
+<title>Introduction</title>
+
+<para>
+It has conceptual similarities to <acronym>CORBA</acronym>, but it is
+intended to extend it in all ways that are required for real time
+multimedia operations.
+</para>
+
+<para>
+It provides a multimedia object model, which can be used for both:
+communication between components in one address space (one process), and
+between components that are in different threads, processes or on
+different hosts.
+</para>
+
+<para>
+All in all, it will be designed for extremely high performance (so
+everything shall be optimized to be blazingly fast), suitable for very
+communicative multimedia applications. For instance streaming videos
+around is one of the applications of &MCOP;, where most
+<acronym>CORBA</acronym> implementations would go down to their knees.
+</para>
+
+<para>
+The interface definitions can handle the following natively:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Continuous streams of data (such as audio data).
+</para>
+</listitem>
+
+<listitem>
+<para>
+Event streams of data (such as &MIDI; events).
+</para>
+</listitem>
+
+<listitem>
+<para>
+Real reference counting.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+and the most important <acronym>CORBA</acronym> gimmicks, like
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Synchronous method invocations.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Asynchronous method invocations.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Constructing user defined data types.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Multiple inheritance.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Passing object references.
+</para>
+</listitem>
+</itemizedlist>
+
+</sect2>
+
+<sect2 id="mcop-protocol-marshalling">
+<title>The &MCOP; Message Marshalling</title>
+
+<para>
+Design goals/ideas:
+</para>
+
+<itemizedlist>
+
+<listitem>
+<para>
+Marshalling should be easy to implement.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Demarshalling requires the receiver to know what type he wants to
+demarshall.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The receiver is expected to use every information - so skipping is only
+in the protocol to a degree that:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+If you know you are going to receive a block of bytes, you don't need to
+look at each byte for an end marker.
+</para>
+</listitem>
+
+<listitem>
+<para>
+If you know you are going to receive a string, you don't need to read it
+until the zero byte to find out its length while demarshalling, however,
+</para>
+</listitem>
+
+<listitem>
+<para>
+If you know you are going to receive a sequence of strings, you need to
+look at the length of each of them to find the end of the sequence, as
+strings have variable length. But if you use the strings for something
+useful, you'll need to do that anyway, so this is no loss.
+</para>
+</listitem>
+</itemizedlist>
+
+</listitem>
+
+<listitem>
+<para>
+As little overhead as possible.
+</para>
+</listitem>
+</itemizedlist>
+
+<!-- TODO: Make this a table -->
+
+<para>
+Marshalling of the different types is shown in the table below:
+</para>
+
+<informaltable>
+<tgroup cols="3">
+<thead>
+<row>
+<entry>Type</entry>
+<entry>Marshalling Process</entry>
+<entry>Result</entry>
+</row>
+</thead>
+
+<tbody>
+<row>
+<entry><type>void</type></entry>
+<entry><type>void</type> types are marshalled by omitting them, so
+nothing is written to the stream for them.</entry>
+<entry></entry>
+</row>
+
+<row>
+<entry><type>long</type></entry>
+<entry>is marshalled as four bytes, the most significant byte first,
+so the number 10001025 (which is 0x989a81) would be marshalled
+as:</entry>
+<entry><literal>0x00 0x98 0x9a 0x81</literal></entry>
+</row>
+
+<row>
+<entry><type>enums</type></entry>
+<entry><para>are marshalled like <type>long</type>s</para></entry>
+<entry></entry>
+</row>
+
+<row>
+<entry><type>byte</type></entry>
+<entry><para>is marshalled as a single byte, so the byte 0x42 would be
+marshalled as:</para></entry>
+<entry><literal>0x42</literal></entry>
+</row>
+
+<row>
+<entry><type>string</type></entry>
+<entry><para>is marshalled as a <type>long</type>, containing the length
+of the following string, and then the sequence of characters; strings
+must end with one zero byte (which is included in the length
+counting).</para>
+<important>
+<para>include the trailing 0 byte in length counting!</para>
+</important>
+<para><quote>hello</quote> would be marshalled as:</para></entry>
+<entry><literal>0x00 0x00 0x00 0x06 0x68 0x65 0x6c 0x6c 0x6f 0x00</literal></entry>
+</row>
+
+<row>
+<entry><type>boolean</type></entry>
+<entry><para>is marshalled as a byte, containing 0 if
+<returnvalue>false</returnvalue> or 1 if
+<returnvalue>true</returnvalue>, so the boolean value
+<returnvalue>true</returnvalue> is marshalled as:</para></entry>
+<entry><literal>0x01</literal></entry>
+</row>
+
+<row>
+<entry><type>float</type></entry>
+<entry><para>is marshalled after the four byte IEEE754 representation -
+detailed docs how IEEE works are here: <ulink
+url="http://twister.ou.edu/workshop.docs/common-tools/numerical_comp_guide/ncg_math.doc.html">http://twister.ou.edu/workshop.docs/common-tools/numerical_comp_guide/ncg_math.doc.html</ulink>
+and here: <ulink
+url="http://java.sun.com/docs/books/vmspec/2nd-edition/html/Overview.doc.html">http://java.sun.com/docs/books/vmspec/2nd-edition/html/Overview.doc.html</ulink>.
+So, the value 2.15 would be marshalled as:</para></entry>
+<entry><literal>0x9a 0x99 0x09 0x40</literal></entry>
+</row>
+
+<row>
+<entry><type>struct</type></entry>
+<entry><para>A structure is marshalled by marshalling its
+contents. There are no additional prefixes or suffixes required, so the
+structure
+</para>
+<programlisting>
+struct test {
+ string name; // which is "hello"
+ long value; // which is 10001025 (0x989a81)
+};
+</programlisting>
+<para>would be marshalled as</para></entry>
+<entry>
+<literallayout>
+0x00 0x00 0x00 0x06 0x68 0x65 0x6c 0x6c
+0x6f 0x00 0x00 0x98 0x9a 0x81
+</literallayout></entry>
+</row>
+
+<row>
+<entry><type>sequence</type></entry>
+<entry><para>a sequence is marshalled by listing the number of elements
+that follow, and then marshalling the elements one by one.</para>
+<para>So a sequence of 3 longs a, with a[0] = 0x12345678, a[1] = 0x01
+and a[2] = 0x42 would be marshalled as:</para></entry>
+<entry>
+<literallayout>
+0x00 0x00 0x00 0x03 0x12 0x34 0x56 0x78
+0x00 0x00 0x00 0x01 0x00 0x00 0x00 0x42
+</literallayout>
+</entry>
+</row>
+</tbody>
+</tgroup>
+</informaltable>
+
+<para>
+If you need to refer to a type, all primitive types are referred by the
+names given above. Structures and enums get their own names (like
+Header). Sequences are referred as *<replaceable>normal
+type</replaceable>, so that a sequence of longs is <quote>*long</quote>
+and a sequence of Header struct's is <quote>*Header</quote>.
+</para>
+
+</sect2>
+
+<sect2 id="mcop-protocol-messages">
+<title>Messages</title>
+
+<para>
+The &MCOP; message header format is defined by this
+structure:
+</para>
+
+<programlisting>
+struct Header {
+ long magic; // the value 0x4d434f50, which is marshalled as MCOP
+ long messageLength;
+ long messageType;
+};
+</programlisting>
+
+<para>
+The possible messageTypes are currently
+</para>
+
+<programlisting>
+ mcopServerHello = 1
+ mcopClientHello = 2
+ mcopAuthAccept = 3
+ mcopInvocation = 4
+ mcopReturn = 5
+ mcopOnewayInvocation = 6
+</programlisting>
+
+<para>
+A few notes about the &MCOP; messaging:
+</para>
+
+
+<itemizedlist>
+<listitem>
+<para>
+Every message starts with a Header.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Some messages types should be dropped by the server, as long as the
+authentication is not complete.
+</para>
+</listitem>
+
+<listitem>
+<para>
+After receiving the header, the protocol (connection) handling can
+receive the message completely, without looking at the contents.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+The messageLength in the header is of course in some cases redundant,
+which means that this approach is not minimal regarding the number of
+bytes.
+</para>
+
+<para>
+However, it leads to an easy (and fast) implementation of non-blocking
+messaging processing. With the help of the header, the messages can be
+received by protocol handling classes in the background (non-blocking),
+if there are many connections to the server, all of them can be served
+parallel. You don't need to look at the message content, to receive the
+message (and to determine when you are done), just at the header, so the
+code for that is pretty easy.
+</para>
+
+<para>
+Once a message is there, it can be demarshalled and processed in one
+single pass, without caring about cases where not all data may have been
+received (because the messageLength guarantees that everything is
+there).
+</para>
+
+</sect2>
+
+<sect2 id="mcop-protocol-invocations">
+<title>Invocations</title>
+
+<para>
+To call a remote method, you need to send the following structure in the
+body of an &MCOP; message with the messageType = 4 (mcopInvocation):
+</para>
+
+<programlisting>
+struct Invocation {
+ long objectID;
+ long methodID;
+ long requestID;
+};
+</programlisting>
+
+<para>
+after that, you send the parameters as structure, &eg; if you invoke the
+method string concat(string s1, string s2), you send a structure like
+</para>
+
+<programlisting>
+struct InvocationBody {
+ string s1;
+ string s2;
+};
+</programlisting>
+
+
+<para>
+if the method was declared to be oneway - that means asynchronous
+without return code - then that was it. Otherwise, you'll receive as
+answer the message with messageType = 5 (mcopReturn)
+</para>
+
+<programlisting>
+struct ReturnCode {
+ long requestID;
+ &lt;resulttype&gt; result;
+};
+</programlisting>
+
+
+<para>
+where &lt;resulttype&gt; is the type of the result. As void types are
+omitted in marshalling, you can also only write the requestID if you
+return from a void method.
+</para>
+
+<para>
+So our string concat(string s1, string s2) would lead to a returncode
+like
+</para>
+
+<programlisting>
+struct ReturnCode {
+ long requestID;
+ string result;
+};
+</programlisting>
+
+</sect2>
+
+<sect2 id="mcop-protocol-inspecting">
+<title>Inspecting Interfaces</title>
+
+<para>
+To do invocations, you need to know the methods an object supports. To
+do so, the methodID 0, 1, 2 and 3 are hardwired to certain
+functionalities. That is
+</para>
+
+<programlisting>
+long _lookupMethod(MethodDef methodDef); // methodID always 0
+string _interfaceName(); // methodID always 1
+InterfaceDef _queryInterface(string name); // methodID always 2
+TypeDef _queryType(string name); // methodID always 3
+</programlisting>
+
+<para>
+to read that, you of course need also
+</para>
+
+<programlisting>
+struct MethodDef {
+ string methodName;
+ string type;
+ long flags; // set to 0 for now (will be required for streaming)
+ sequence&lt;ParamDef&gt; signature;
+};
+
+struct ParamDef {
+ string name;
+ long typeCode;
+};
+</programlisting>
+
+<para>
+the parameters field contains type components which specify the types of
+the parameters. The type of the returncode is specified in the
+MethodDef's type field.
+</para>
+
+<para>
+Strictly speaking, only the methods
+<methodname>_lookupMethod()</methodname> and
+<methodname>_interfaceName()</methodname> differ from object to object,
+while the <methodname>_queryInterface()</methodname> and
+<methodname>_queryType()</methodname> are always the same.
+</para>
+
+<para>
+What are those methodIDs? If you do an &MCOP; invocation, you are
+expected to pass a number for the method you are calling. The reason for
+that is, that numbers can be processed much faster than strings when
+executing an &MCOP; request.
+</para>
+
+<para>
+So how do you get those numbers? If you know the signature of the
+method, that is a MethodDef that describes the method, (which contains
+name, type, parameter names, parameter types and such), you can pass
+that to _lookupMethod of the object where you wish to call a method. As
+_lookupMethod is hardwired to methodID 0, you should encounter no
+problems doing so.
+</para>
+
+<para>
+On the other hand, if you don't know the method signature, you can find
+which methods are supported by using _interfaceName, _queryInterface and
+_queryType.
+</para>
+</sect2>
+
+<sect2 id="mcop-protocol-typedefs">
+<title>Type Definitions</title>
+
+<para>
+User defined datatypes are described using the
+<structname>TypeDef</structname> structure:
+</para>
+
+<programlisting>
+struct TypeComponent {
+ string type;
+ string name;
+};
+
+struct TypeDef {
+ string name;
+
+ sequence&lt;TypeComponent&gt; contents;
+};
+</programlisting>
+
+</sect2>
+</sect1>
+
+<sect1 id="why-not-dcop">
+<title>Why &arts; Doesn't Use &DCOP;</title>
+
+<para>
+Since &kde; dropped <acronym>CORBA</acronym> completely, and is using
+&DCOP; everywhere instead, naturally the question arises why &arts;
+isn't doing so. After all, &DCOP; support is in
+<classname>KApplication</classname>, is well-maintained, supposed to
+integrate greatly with libICE, and whatever else.
+</para>
+
+<para>
+Since there will be (potentially) a lot of people asking whether having
+&MCOP; besides &DCOP; is really necessary, here is the answer. Please
+don't get me wrong, I am not trying to say <quote>&DCOP; is
+bad</quote>. I am just trying to say <quote>&DCOP; isn't the right
+solution for &arts;</quote> (while it is a nice solution for other
+things).
+</para>
+
+<para>
+First, you need to understand what exactly &DCOP; was written
+for. Created in two days during the &kde;-TWO meeting, it was intended
+to be as simple as possible, a really <quote>lightweight</quote>
+communication protocol. Especially the implementation left away
+everything that could involve complexity, for instance a full blown
+concept how data types shall be marshalled.
+</para>
+
+<para>
+Even though &DCOP; doesn't care about certain things (like: how do I
+send a string in a network-transparent manner?) - this needs to be
+done. So, everything that &DCOP; doesn't do, is left to &Qt; in the
+&kde; apps that use &DCOP; today. This is mostly type management (using
+the &Qt; serialization operator).
+</para>
+
+<para>
+So &DCOP; is a minimal protocol which perfectly enables &kde;
+applications to send simple messages like <quote>open a window pointing
+to http://www.kde.org</quote> or <quote>your configuration data has
+changed</quote>. However, inside &arts; the focus lies on other things.
+</para>
+
+<para>
+The idea is, that little plugins in &arts; will talk involving such data
+structures as <quote>midi events</quote> and <quote>songposition
+pointers</quote> and <quote>flow graphs</quote>.
+</para>
+
+<para>
+These are complex data types, which must be sent between different
+objects, and be passed as streams, or parameters. &MCOP; supplies a type
+concept, to define complex data types out of simpler ones (similar to
+structs or arrays in C++). &DCOP; doesn't care about types at all, so
+this problem would be left to the programmer - like: writing C++ classes
+for the types, and make sure they can serialize properly (for instance:
+support the &Qt; streaming operator).
+</para>
+
+<para>
+But that way, they would be inaccessible to everything but direct C++
+coding. Specifically, you could not design a scripting language, that
+would know all types plugins may ever expose, as they are not self
+describing.
+</para>
+
+<para>
+Much the same argument is valid for interfaces as well. &DCOP; objects
+don't expose their relationships, inheritance hierarchies, etc. - if you
+were to write an object browser which shows you <quote>what attributes
+has this object got</quote>, you'd fail.
+</para>
+
+
+<para>
+While Matthias told me that you have a special function
+<quote>functions</quote> on each object that tells you about the methods
+that an object supports, this leaves out things like attributes
+(properties), streams and inheritance relations.
+</para>
+
+<para>
+This seriously breaks applications like &arts-builder;. But remember:
+&DCOP; was not so much intended to be an object model (as &Qt; already
+has one with <application>moc</application> and similar), nor to be
+something like <acronym>CORBA</acronym>, but to supply inter-application
+communication.
+</para>
+
+<para>
+Why &MCOP; even exists is: it should work fine with streams between
+objects. &arts; makes heavy use of small plugins, which interconnect
+themselves with streams. The <acronym>CORBA</acronym> version of &arts;
+had to introduce a very annoying split between <quote>the SynthModule
+objects</quote>, which were the internal work modules that did do the
+streaming, and <quote>the <acronym>CORBA</acronym> interface</quote>,
+which was something external.
+</para>
+
+<para>
+Much code cared about making interaction between <quote>the SynthModule
+objects</quote> and <quote>the <acronym>CORBA</acronym>
+interface</quote> look natural, but it didn't, because
+<acronym>CORBA</acronym> knew nothing at all about streams. &MCOP;
+does. Look at the code (something like
+<filename>simplesoundserver_impl.cc</filename>). Way better! Streams
+can be declared in the interface of modules, and implemented in a
+natural looking way.
+</para>
+
+<para>
+One can't deny it. One of the reasons why I wrote &MCOP; was speed. Here
+are some arguments why &MCOP; will definitely be faster than &DCOP;
+(even without giving figures).
+</para>
+
+
+<para>
+An invocation in &MCOP; will have a six-<quote>long</quote>-header. That
+is:
+</para>
+
+<itemizedlist>
+<listitem><para>magic <quote>MCOP</quote></para></listitem>
+<listitem><para>message type (invocation)</para></listitem>
+<listitem><para>size of the request in bytes</para></listitem>
+<listitem><para>request ID</para></listitem>
+<listitem><para>target object ID</para></listitem>
+<listitem><para>target method ID</para></listitem>
+</itemizedlist>
+
+<para>
+After that, the parameters follow. Note that the demarshalling of this
+is extremely fast. You can use table lookups to find the object and the
+method demarshalling function, which means that complexity is O(1) [ it
+will take the same amount of time, no matter how many objects are alive,
+or how many functions are there ].
+</para>
+
+<para>
+Comparing this to &DCOP;, you'll see, that there are at least
+</para>
+
+<itemizedlist>
+<listitem><para>a string for the target object - something like
+<quote>myCalculator</quote></para></listitem>
+<listitem><para>a string like <quote>addNumber(int,int)</quote> to
+specify the method</para></listitem>
+<listitem><para>several more protocol info added by libICE, and other
+DCOP specifics I don't know</para></listitem>
+</itemizedlist>
+
+<para>
+These are much more painful to demarshall, as you'll need to parse the
+string, search for the function, &etc;.
+</para>
+
+<para>
+In &DCOP;, all requests are running through a server
+(<application>DCOPServer</application>). That means, the process of a
+synchronous invocation looks like this:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Client process sends invocation.
+</para>
+</listitem>
+
+<listitem>
+<para>
+<application>DCOPserver</application> (man-in-the-middle) receives
+invocation and looks where it needs to go, and sends it to the
+<quote>real</quote> server.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Server process receives invocation, performs request and sends result.
+</para>
+</listitem>
+
+<listitem>
+<para>
+<application>DCOPserver</application> (man-in-the-middle) receives
+result and ... sends it to the client.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Client decodes reply.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+In &MCOP;, the same invocation looks like this:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Client process sends invocation.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Server process receives invocation, performs request and sends result.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Client decodes reply.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+Say both were implemented correctly, &MCOP;s peer-to-peer strategy
+should be faster by a factor of two, than &DCOP;s man-in-the-middle
+strategy. Note however that there were of course reasons to choose the
+&DCOP; strategy, which is namely: if you have 20 applications running,
+and each app is talking to each app, you need 20 connections in &DCOP;,
+and 200 with &MCOP;. However in the multimedia case, this is not
+supposed to be the usual setting.
+</para>
+
+<para>
+I tried to compare &MCOP; and &DCOP;, doing an invocation like adding
+two numbers. I modified testdcop to achieve this. However, the test may
+not have been precise on the &DCOP; side. I invoked the method in the
+same process that did the call for &DCOP;, and I didn't know how to get
+rid of one debugging message, so I used output redirection.
+</para>
+
+<para>
+The test only used one object and one function, expect &DCOP;s results
+to decrease with more objects and functions, while &MCOP;s results
+should stay the same. Also, the <application>dcopserver</application>
+process wasn't connected to other applications, it might be that if many
+applications are connected, the routing performance decreases.
+</para>
+
+<para>
+The result I got was that while &DCOP; got slightly more than 2000
+invocations per second, &MCOP; got slightly more than 8000 invocations
+per second. That makes a factor of 4. I know that &MCOP; isn't tuned to
+the maximum possible, yet. (Comparison: <acronym>CORBA</acronym>, as
+implemented with mico, does something between 1000 and 1500 invocations
+per second).
+</para>
+
+<para>
+If you want <quote>harder</quote> data, consider writing some small
+benchmark app for &DCOP; and send it to me.
+</para>
+
+<para>
+<acronym>CORBA</acronym> had the nice feature that you could use objects
+you implemented once, as <quote>separate server process</quote>, or as
+<quote>library</quote>. You could use the same code to do so, and
+<acronym>CORBA</acronym> would transparently decide what to do. With
+&DCOP;, that is not really intended, and as far as I know not really
+possible.
+</para>
+
+<para>
+&MCOP; on the other hand should support that from the beginning. So you
+can run an effect inside &artsd;. But if you are a wave editor, you can
+choose to run the same effect inside your process space as well.
+</para>
+
+<para>
+While &DCOP; is mostly a way to communicate between apps, &MCOP; is also
+a way to communicate inside apps. Especially for multimedia streaming,
+this is important (as you can run multiple &MCOP; objects in parallel, to
+solve a multimedia task in your application).
+</para>
+
+<para>
+Although &MCOP; does not currently do so, the possibilities are open to
+implement quality of service features. Something like <quote>that &MIDI; event
+is really really important, compared to this invocation</quote>. Or something
+like <quote>needs to be there in time</quote>.
+</para>
+
+<para>
+On the other hand, stream transfer can be integrated in the &MCOP;
+protocol nicely, and combined with <acronym>QoS</acronym> stuff. Given
+that the protocol may be changed, &MCOP; stream transfer should not
+really get slower than conventional <acronym>TCP</acronym> streaming,
+but: it will be easier and more consistent to use.
+</para>
+
+<para>
+There is no need to base a middleware for multimedia on &Qt;. Deciding
+so, and using all that nice &Qt;-streaming and stuff, will easily lead
+to the middleware becoming a &Qt;-only (or rather &kde;-only) thing. I
+mean: as soon as I'll see the GNOMEs using &DCOP;, too, or something like
+that, I am certainly proven wrong.
+</para>
+
+<para>
+While I do know that &DCOP; basically doesn't know about the data types
+it sends, so that you could use &DCOP; without using &Qt;, look at how
+it is used in daily &kde; usage: people send types like
+<classname>QString</classname>, <classname>QRect</classname>,
+<classname>QPixmap</classname>, <classname>QCString</classname>, ...,
+around. These use &Qt;-serialization. So if somebody chose to support
+&DCOP; in a GNOME program, he would either have to claim to use
+<classname>QString</classname>,... types (although he doesn't do so),
+and emulate the way &Qt; does the streaming, or he would send other
+string, pixmap and rect types around, and thus not be interoperable.
+</para>
+
+<para>
+Well, whatever. &arts; was always intended to work with or without
+&kde;, with or without &Qt;, with or without X11, and maybe even with or
+without &Linux; (and I even have no problems with people who port it to
+popular non-free operating systems).
+</para>
+
+<para>
+It is my position that non-&GUI;-components should be written
+in a non-&GUI;-dependent way, to make sharing them among a wider range of
+developers (and users) possible.
+</para>
+
+<para>
+I see that using two <acronym>IPC</acronym> protocols may cause
+inconveniences. Even more, if they are both non-standard. However, for
+the reasons given above, switching to &DCOP; is no option. If there is
+significant interest to find a way to unite the two, okay, we can
+try. We could even try to make &MCOP; speak <acronym>IIOP</acronym>,
+then we'd have a <acronym>CORBA</acronym> <acronym>ORB</acronym> ;).
+</para>
+
+<para>
+I talked with Matthias Ettrich a bit about the future of the two
+protocols, and we found lots of ways how things could go on. For
+instance, &MCOP; could handle the message communication in &DCOP;, thus
+bringing the protocols a bit closer together.
+</para>
+
+<para>
+So some possible solutions would be:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+Write an &MCOP; - &DCOP; gateway (which should be possible, and would
+make interoperation possible) - note: there is an experimental
+prototype, if you like to work on that.
+</para>
+</listitem>
+
+<listitem>
+<para>
+Integrate everything &DCOP; users expect into &MCOP;, and try to only do
+&MCOP; - one could add a <quote>man-in-the-middle-option</quote> to
+&MCOP;, too ;)
+</para>
+</listitem>
+
+<listitem>
+<para>
+Base &DCOP; on &MCOP; instead of libICE, and slowly start integrating
+things closer together.
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+However, it may not be the worst possibility to use each protocol for
+everything it was intended for (there are some big differences in the
+design goals), and don't try to merge them into one.
+</para>
+
+</sect1>
+</chapter>
diff --git a/doc/artsbuilder/midi.docbook b/doc/artsbuilder/midi.docbook
new file mode 100644
index 00000000..611b457c
--- /dev/null
+++ b/doc/artsbuilder/midi.docbook
@@ -0,0 +1,474 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="midi">
+<title>&MIDI;</title>
+
+<sect1 id="midi-overview">
+<title>Overview</title>
+
+<!-- what-to-say-here: aRts has three roles
+ * moving midi events around between applications
+ * abstracting the hardware
+ * synthesizer -->
+
+<para>
+The &MIDI; support in &arts; can do a number of things. First of all, it
+allows <emphasis>communication</emphasis> between different pieces of
+software that produce or consume &MIDI; events. If you for instance have
+a sequencer and a sampler that are both &arts; aware, &arts; can send
+the &MIDI; events from the sequencer to the sampler.
+</para>
+
+<para>
+On the other hand, &arts; can also help applications to
+<emphasis>interact with the hardware</emphasis>. If a piece of software
+(for instance the sampler) works together with &arts;, it will be able
+to receive the &MIDI; events from an external &MIDI; keyboard as well.
+</para>
+
+<para>
+Finally, &arts; makes a great <emphasis>modular
+synthesizer</emphasis>. It is designed to do exactly this. So you can
+build instruments out of small modules using artsbuilder, and then use
+these instruments to compose or play music. Synthesis does not
+necessarily mean pure synthesis, there are modules you can use to play
+samples. So &arts; can be a sampler, synthesizer, and so on, and being
+fully modular, it is very easy to extend, very easy to experiment with,
+powerful and flexible.
+</para>
+</sect1>
+
+<sect1 id="midi-manager">
+<title>The &MIDI; Manager</title>
+<!-- what-to-say-here:
+ * how to use artscontrol - view midimanager
+ * what does autorestore do? (not yet implemented - so not yet documented) -->
+
+<para>
+The central component in &arts; that keeps track of which applications are
+connected and how midi events should be passed between them is the midi
+manager. To see or influence what it does, start artscontrol. Then,
+choose <menuchoice><guilabel>View</guilabel><guilabel>View Midi
+Manager</guilabel> </menuchoice> from the menu.
+</para>
+
+<para>
+On the left side, you will see <guilabel>Midi Inputs</guilabel>. There,
+all objects that produce &MIDI; events, such as an external &MIDI; port
+which sends data from a connected &MIDI; keyboard, a sequencer which
+plays a song and so on will be listed. On the right side, you will see
+<guilabel>Midi Outputs</guilabel>. There, all things that consume &MIDI;
+events, such as a simulated sampler (as software), or the external
+&MIDI; port where your hardware sampler outside your computer is
+connected will be listed. New applications, such as sequencers and so on
+will register themselves, so the list will be changing over time.
+</para>
+
+<para>
+You can connect inputs and outputs if you mark the input on the left
+side and the output on the right side, and choose
+<guilabel>Connect</guilabel> with the button
+below. <guilabel>Disconnect</guilabel> works the same. You will see what
+is connected as small lines between the inputs and outputs, in the
+middle of the window. Note that you can connect one sender to more than
+one receiver (and the other way round).
+</para>
+
+<para>
+Programs (like the Brahms sequencer) will add themselves when they start
+and be removed from the list when they are terminated. But you can also
+add new things in the <guilabel>Add</guilabel> menu:
+</para>
+
+<variablelist>
+<varlistentry>
+<term><guimenuitem>System Midi Port (OSS)</guimenuitem></term>
+<listitem>
+<para>
+This will create a new &arts; object that talks to an external midi
+port.
+</para>
+
+<para>
+As external midi ports can both send and receive data, choosing this
+option will add a midi input and a midi output. Under &Linux;, you
+should either have an <acronym>OSS</acronym> (or
+<acronym>OSS</acronym>/Free, the thing that comes with your &Linux;
+kernel) or an <acronym>ALSA</acronym> driver for your soundcard
+installed, to make it work. It will ask for the name of the
+device. Usually, this is <filename
+class="devicefile">/dev/midi</filename> or <filename
+class="devicefile">/dev/midi00</filename>.
+</para>
+
+<para>
+However, if you have more than one &MIDI; device or a &MIDI; loopback
+driver installed, there might be more choices. To see information about
+your midi ports, start the &kcontrolcenter;, and choose
+<menuchoice><guilabel>Information</guilabel>
+<guilabel>Sound</guilabel></menuchoice>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><guimenuitem>aRts Synthesis Midi Output</guimenuitem></term>
+<listitem>
+<para>
+This will add a new &MIDI; output with an &arts; synthesis
+instrument. If you choose the menu item, a dialog will pop up, and allow
+you to choose an instrument. You can create new instruments using
+artsbuilder. All <literal role="extension">.arts</literal> files with a
+name that starts with <filename>instrument_</filename> will appear here.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+</sect1>
+
+<sect1 id="brahms">
+<title>Using &arts; &amp; Brahms</title>
+
+<para>
+Actually, getting started is quite easy. You need a &kde; 2.1-aware
+version of &brahms;, which can be found in the <literal>kmusic</literal>
+<acronym>CVS</acronym> module. There is also information on how to get
+&brahms; on the <ulink url="http://www.arts-project.org/">aRts
+Homepage</ulink> in the Download section.
+</para>
+
+<para>
+When you start it, it will show up in the &MIDI; manager. If you want to
+do synthesis, simply add a synthesis &MIDI; instrument via
+<menuchoice><guilabel>Add</guilabel><guilabel>aRts Synthesis Midi
+Output</guilabel></menuchoice>.
+</para>
+
+<para>
+Choose an instrument (for instance <guilabel>organ2</guilabel>). Connect
+them using the <guilabel>Connect</guilabel> button. Finally, you can
+start composing in &brahms;, and the output will be synthesized with
+&arts;.
+</para>
+
+<para>
+It is usually a good idea to have the &artscontrol; window open, and see
+that the volume is not too loud (quality gets bad when the bars hit the
+upper limit). Now you can start working on a new &arts; demosong, and if
+you are done, you can get it published on aRts-project.org ;-).
+</para>
+
+<!-- TODO: how to do more than one instrument in Brahms (hm, not implemented
+ yet, not documented yet), how to use samples, mapping and so on. These
+ things need to be implemented, too. -->
+
+</sect1>
+
+<sect1 id="midisend">
+<title>midisend</title>
+
+<para>
+<command>midisend</command> is a small application that will allow you
+to send &MIDI; events from
+the shell. It will register as client like all other applications. The most
+simple way to use it is to do
+
+<screen><prompt>&percnt;</prompt> <userinput><command>midisend</command> <option>-f</option> <parameter><replaceable>/dev/midi00</replaceable></parameter></userinput> </screen>
+
+which will achieve about the same as adding a system &MIDI; port in
+&artscontrol;. (Not quite, because <command>midisend</command> only sends events). The difference is that it is
+easy for instance to start <command>midisend</command> on different computers (and like that,
+use network transparency).
+</para>
+
+<para>
+It is also possible to make <command>midisend</command> send data from
+<filename class="devicefile">stdin</filename>, which you can use to pipe
+data from non-&arts;-aware applications to &arts;, like this:
+
+<screen><prompt>&percnt;</prompt> <userinput><command><replaceable>applicationwhichproducesmidieventsonstdout</replaceable></command> | <command>midisend</command> <option>-f</option> <option><replaceable>-</replaceable></option></userinput></screen>
+<!-- TODO: document all options -->
+</para>
+
+</sect1>
+
+<sect1 id="midi-creating-instruments">
+<title>Creating Instruments</title>
+
+<para>
+The way &arts; does midi synthesis is this: you have a structure which
+has some input ports, where it gets the frequency, velocity (volume) and
+a parameter which indicates whether the note is still pressed. The
+structure should now synthesize exactly that note with that volume, and
+react on the pressed parameter (where pressed = 1 means the user still
+holds down that key and pressed = 0 means the user has released that
+key).
+</para>
+
+<para>
+When &MIDI; events arrive, &arts; will create new structures for the
+notes as needed, give them the parameters, and clean them up once they
+are done.
+</para>
+
+<para>
+To create and use such a structure, you should do the following:
+</para>
+
+<itemizedlist>
+<listitem>
+<para>
+To get started, the most convenient way is to open
+<filename>template_Instrument.arts</filename> in &arts-builder;.
+</para>
+
+<para>
+This can be achieved by using
+<menuchoice><guimenu>File</guimenu><guimenuitem>Open
+Example...</guimenuitem></menuchoice> and choosing
+<guimenuitem>template_Instrument</guimenuitem> in the file
+selector. This will give you an empty structure with the required
+parameters, which you only need to <quote>fill out</quote>.
+</para>
+</listitem>
+
+<listitem>
+<para>
+To process the pressed parameter, it is convenient to use
+Synth&lowbar;ENVELOPE&lowbar;ADSR, or, in case of playing some drum wav,
+just play it anyway, and ignore the pressed parameter.
+</para>
+</listitem>
+
+<listitem>
+<para>
+The structure should indicate when it is no longer needed on the
+<quote>done</quote> output. If done is <returnvalue>1</returnvalue>,
+&arts; assumes that it can delete the structure. Conveniently, the ADSR
+envelope provides a parameter when it is done, so you just need to
+connect this to the done output of the structure.
+</para>
+</listitem>
+
+<listitem>
+<para>
+You should rename your structure to some name starting with
+<filename>instrument_</filename>, like
+<filename>instrument_piano.arts</filename> - you should save the file
+under the same name, in your <filename
+class="directory">$<envar>HOME</envar>/arts/structures</filename>
+folder (which is where artsbuilder wants to save files normally).
+</para>
+</listitem>
+
+<listitem>
+<para>
+Finally, once you saved it, you will be able to use it with &artscontrol;
+in the &MIDI; manager <!-- todo link to midimanager -->.</para>
+</listitem>
+
+<listitem>
+<para>
+Oh, and of course your structure should play the audio data it generates
+to the left and right output of the structure, which will then be played
+via audio manager (you can see that in &artscontrol;), so that you
+finally can hear it (or postprocess it with effects).
+</para>
+</listitem>
+</itemizedlist>
+
+<para>
+A good way to learn how to do instruments is to open an existing
+instrument via <menuchoice><guilabel>File</guilabel><guilabel>Open
+Example</guilabel> </menuchoice> and see how it works ;)
+</para>
+</sect1>
+
+<sect1 id="mapped-instruments">
+<title>Mapped Instruments</title>
+
+<para>
+Mapped instruments are instruments, that behave differently depending on
+the pitch, the program, the channel or the velocity. You could for
+instance build a piano of 5 octaves, using one sample for each octave
+(pitchshifting it accordingly). That sounds a whole lot better than only
+using one sample.
+</para>
+
+<para>
+You could also build a drum map, that plays one specific drum sample per
+key.
+</para>
+
+<para>
+Finally, it is very useful if you put quite some different sounds into
+one mapped instrument on different programs. That way, you can use your
+sequencer, external keyboard or other &MIDI; source to switch between
+the sounds without having to tweak &arts; as you work.
+</para>
+
+<para>
+A good example for this is the instrument <filename>arts_all</filename>,
+which just puts together all instruments that come with &arts; in one
+map. That way, you just need to setup once in &artscontrol; to use this
+<quote>instrument</quote>, and then, you can compose a whole song in a
+sequencer without ever bothering about &arts;. Need another sound?
+Simply change the program in the sequencer, and &arts; will give you
+another sound.
+</para>
+
+<para>
+Creating such maps is pretty straightforward. You just need to create a
+textfile, and write rules which look like this:
+</para>
+
+<programlisting>
+ON <replaceable>[ conditions ...]</replaceable> DO structure=<replaceable>somestructure</replaceable>.arts
+</programlisting>
+
+<para>
+The conditions could be one or more than one of the following:
+</para>
+
+<variablelist>
+
+<varlistentry>
+<term><option>pitch</option></term>
+
+<listitem>
+<para>
+The pitch that is being played. You would use this if you want to split
+your instrument depending on the pitch. In our initial examples, a piano
+which uses different samples for different octaves would use this as
+condition. You can specify a single pitch, like
+<userinput><option>pitch</option>=<parameter>62</parameter></userinput>
+or a range of pitches, like
+<userinput><option>pitch</option>=<parameter>60</parameter>-<parameter>72</parameter></userinput>.
+The possible pitches are between <parameter>0</parameter> and
+<parameter>127</parameter>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>program</option></term>
+<listitem>
+<para>
+The program that is active on the channel that the note is being sent
+on. Usually, sequencers let you choose the <quote>instrument</quote> via
+the program setting. Single programs or ranges are allowed, that is
+<userinput><option>program</option>=<parameter>3</parameter></userinput>
+or
+<userinput><option>program</option>=<parameter>3</parameter>-<parameter>6</parameter></userinput>.
+The possible programs are between <parameter>0</parameter> and
+<parameter>127</parameter>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>channel</option></term>
+<listitem>
+<para>
+The channel that the note is being sent on. Single channels or
+ranges are allowed, that is
+<userinput><option>channel</option>=<parameter>0</parameter></userinput>
+or
+<userinput><option>channel</option>=<parameter>0</parameter>-<parameter>8</parameter></userinput>.
+The possible channels are between <parameter>0</parameter> and
+<parameter>15</parameter>.
+</para>
+</listitem>
+
+</varlistentry>
+<varlistentry>
+<term><option>velocity</option></term>
+<listitem>
+<para>
+The velocity (volume) that the note has. Single velocities (who
+would use that?) or ranges are allowed, that is
+<userinput><option>velocity</option>=<parameter>127</parameter></userinput>
+or
+<userinput><option>velocity</option>=<parameter>64</parameter>-<parameter>127</parameter></userinput>.
+The possible velocities are between <parameter>0</parameter> and
+<parameter>127</parameter>.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+<para>
+A complete example for a map would be (this is taken from the current
+<filename>instrument_arts_all.arts-map</filename>):
+</para>
+
+<programlisting>
+ON program=0 DO structure=instrument_tri.arts
+ON program=1 DO structure=instrument_organ2.arts
+ON program=2 DO structure=instrument_slide1.arts
+ON program=3 DO structure=instrument_square.arts
+ON program=4 DO structure=instrument_neworgan.arts
+ON program=5 DO structure=instrument_nokind.arts
+ON program=6 DO structure=instrument_full_square.arts
+ON program=7 DO structure=instrument_simple_sin.arts
+ON program=8 DO structure=instrument_simple_square.arts
+ON program=9 DO structure=instrument_simple_tri.arts
+ON program=10 DO structure=instrument_slide.arts
+ON program=11 pitch=60 DO structure=instrument_deepdrum.arts
+ON program=11 pitch=61 DO structure=instrument_chirpdrum.arts
+</programlisting>
+
+<para>
+As you see, the structure is chosen depending on the program. On
+program 11, you see a <quote>drum map</quote> (with two entries), which
+would play a <quote>deepdrum</quote> on C-5 (pitch=60), and a
+<quote>chirpdrum</quote> on C#5 (pitch=61).
+</para>
+
+<para>
+To make map files automatically appear in &artscontrol; as choice for
+the instrument, they have to be called
+<filename>instrument_<replaceable>something</replaceable>.arts-map</filename>
+and reside either in your Home Folder, under <filename
+class="directory">$<envar>HOME</envar>/arts/structures</filename>, or in the
+&kde; folder under <filename
+class="directory">$<envar>KDEDIR</envar>/usr/local/kde/share/apps/artsbuilder/examples</filename>. Structures
+that are used by the map can either be given with an absolute path, or
+relative to the folder the map file resides in.
+</para>
+
+<para>
+Extending the arts_all map or even making a complete general &MIDI; map
+for &arts; is a good idea for making &arts; easier to use
+out-of-the-box. Please consider contributing interesting instruments
+you make, so that they can be included in future versions of &arts;.
+</para>
+</sect1>
+
+<!-- TODO: Maybe helpful
+ * using an external keyboard
+ * loopback midi device
+
+<sect1 id="quick-start">
+<title>Quick Start</title>
+<para>
+</para>
+</sect1>
+<sect1 id="internal-details">
+<title>More Internal Details</title>
+<para>
+</para>
+</sect1>
+
+<sect1 id="other-considerations">
+<title>Other Considerations</title>
+<para>
+</para>
+</sect1>
+-->
+
+</chapter>
diff --git a/doc/artsbuilder/midiintro.docbook b/doc/artsbuilder/midiintro.docbook
new file mode 100644
index 00000000..18d6fc93
--- /dev/null
+++ b/doc/artsbuilder/midiintro.docbook
@@ -0,0 +1,14 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE appendix PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<appendix id="midi-introduction">
+
+<title>Introduction to <acronym>MIDI</acronym></title>
+
+<para>
+Not yet written
+</para>
+
+</appendix>
diff --git a/doc/artsbuilder/modules.docbook b/doc/artsbuilder/modules.docbook
new file mode 100644
index 00000000..d0da50d8
--- /dev/null
+++ b/doc/artsbuilder/modules.docbook
@@ -0,0 +1,1336 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="arts-modules">
+<title>&arts; modules</title>
+
+ <sect1 id="modules-introduction">
+<title>Introduction</title>
+
+<para>
+This chapter describes all of the standard &arts; modules. One of the
+most powerful features of &arts;, modules can be connected together into
+structures to implement new functions such as effects and instruments.
+</para>
+
+<para>
+Modules are broken down into two categories. Synthesis modules are used
+for implementing the <quote>plumbing</quote> that manipulates multimedia
+data streams to implement new effects, instruments, mixers, and
+applications. Visual modules allow you to provide a graphical user
+interface to control the sound structures that are built up with the
+synthesis modules.
+</para>
+
+</sect1>
+
+<sect1 id="synth-modules-reference">
+<title>Synthesis Modules Reference</title>
+<para>
+</para>
+
+<sect2 id="mcat-synth-arithmetic-mixing">
+<title>Arithmetic + Mixing</title>
+
+<para>
+</para>
+
+<sect3 id="mref-synth-add-sect">
+<title>Synth&lowbar;ADD</title>
+<anchor id="mref-synth-add" />
+
+<mediaobject>
+<imageobject>
+<imagedata fileref="images/Synth_ADD.png" format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;ADD</phrase></textobject>
+</mediaobject>
+
+<para>
+This adds two signals.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-mul-sect">
+<title>Synth&lowbar;MUL</title>
+<anchor id="mref-synth-mul"/>
+
+<mediaobject>
+<imageobject>
+<imagedata fileref="images/Synth_MUL.png" format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;MUL</phrase></textobject>
+</mediaobject>
+
+<para>
+This multiplies a signal by a factor. You can use this to scale signals
+down (0 &lt; factor &lt; 1) or up (factor &gt; 1) or invert signals
+(factor &lt; 0). Note that the factor may be a signal and does not have
+to be constant (&eg; an envelope or a real signal).
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-div-sect">
+<title>Synth&lowbar;DIV</title>
+<anchor id="mref-synth-div"/>
+
+<mediaobject>
+<imageobject>
+<imagedata fileref="images/Synth_DIV.png" format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;DIV</phrase></textobject>
+</mediaobject>
+
+<para>
+This divides a signal by a factor. You can use this to divide one signal
+by another one. Or set invalue1 to 1 and you will get the
+reciprocal of the invalue2 as outvalue. Take care that invalue2 never
+reaches 0 or you will get problems with divisions by zero.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-multi-add-sect">
+<title>Synth&lowbar;MULTI&lowbar;ADD</title>
+<anchor id="mref-synth-multi-add" />
+
+<mediaobject>
+<imageobject>
+<imagedata fileref="images/Synth_MULTI_ADD.png"
+ format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;MULTI&lowbar;ADD</phrase></textobject>
+</mediaobject>
+
+<para>
+This adds an arbitrary number of signals. If you need to sum up the
+waveforms produced by four different oscillators, you can for instance
+connect all their outputs to one Synth&lowbar;MULTI&lowbar;ADD
+module. This is more efficient than using three Synth&lowbar;ADD
+modules.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-xfade-sect">
+<title>Synth&lowbar;XFADE</title>
+<anchor id="mref-synth-xfade" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_XFADE.png" format="PNG"/>
+</imageobject>
+<textobject><phrase>Synth&lowbar;XFADE</phrase></textobject>
+</mediaobject>
+
+<para>
+This crossfades two signals. If the percentage input is -1, only the
+left signal is heard, if it is 1, only the right signal is heard. When
+it is 0, both signals are heard with the same volume.
+</para>
+
+<para>
+This allows you to ensure that your signal stays in a well defined
+range. If you had two signals that were between -1 and 1 before
+crossfading, they will be in the same range after crossfading.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-autopanner-sect">
+<title>Synth&lowbar;AUTOPANNER</title>
+<anchor id="mref-synth-autopanner" />
+
+<para>
+The opposite of a crossfader. This takes a mono signal and splits it
+into a stereo signal: It is used to automatically pan the input signal
+between the left and the right output. This makes mixes more lively. A
+standard application would be a guitar or lead sound.
+</para>
+
+<para>
+Connect a <acronym>LFO</acronym>, a sine or saw wave for example to
+inlfo, and select a frequency between 0.1 and 5Hz for a traditional
+effect or even more for Special <acronym>FX</acronym>.
+</para>
+
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-busses">
+<title>Busses</title>
+
+<sect3 id="mref-synth-bus-uplink-sect">
+<title>Synth&lowbar;BUS&lowbar;UPLINK</title>
+<anchor id="mref-synth-bus-uplink" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_BUS_UPLINK.png"
+ format="PNG"/>
+</imageobject>
+<textobject><phrase>Synth&lowbar;BUS&lowbar;UPLINK</phrase></textobject>
+</mediaobject>
+
+<para>
+An uplink to a bus. Give signals to left and right, and the name of the
+bus where the data should go on the <quote>bus</quote> port. The
+combined signal from all uplinks with this name will appear on every
+downlink on that <quote>bus</quote>.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-bus-downlink-sect">
+<title>Synth&lowbar;BUS&lowbar;DOWNLINK</title>
+<anchor id="mref-synth-bus-downlink" />
+
+<mediaobject>
+<imageobject>
+<imagedata fileref="images/Synth_BUS_DOWNLINK.png"
+ format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;BUS&lowbar;DOWNLINK</phrase></textobject>
+</mediaobject>
+
+<para>
+Gets (the sum of) all data that is put to a certain bus (with the name
+you specify at the <quote>bus</quote> port).
+</para>
+</sect3>
+
+</sect2>
+
+<!-- TODO AFTER KDE2.1: move freeverb into delays, and rename category to
+ Delays &amp; reverbs -->
+
+<sect2 id="mcat-synth-delays">
+<title>Delays</title>
+
+<para>
+</para>
+
+<sect3 id="mref-synth-delay-sect">
+<title>Synth&lowbar;DELAY</title>
+<anchor id="mref-synth-delay" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_DELAY.png"
+ format="PNG"/></imageobject></mediaobject>
+
+<para>
+This delays the input signal for an amount of time. The time
+specification must be between 0 and maxdelay for a delay between 0 and
+maxdelay seconds.
+</para>
+
+<para>
+This kind of delay <emphasis>may not be used</emphasis> in feedback
+structures. This is because it's a variable delay. You can modify its
+length while it is running, and even set it down to zero. But since in a
+feedback structure the own output is needed to calculate the next
+samples, a delay whose value could drop to zero during synthesis could
+lead to a stall situation.
+</para>
+
+<para>
+Use CDELAYs in that setup, perhaps combine a small constant delay (of
+0.001 seconds) with a flexible delay.
+</para>
+
+<para>
+You can also combine a CDELAY and a DELAY to achieve a variable length
+delay with a minimum value in a feedback loop. Just make sure that you
+have a CDELAY involved.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-cdelay-sect">
+<title>Synth&lowbar;CDELAY</title>
+<anchor id="mref-synth-cdelay" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_CDELAY.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;CDELAY</phrase></textobject>
+</mediaobject>
+
+<para>
+This delays the input signal for an amount of time. The time
+specification must be greater than 0 for a delay of 0 seconds or more.
+The delay is constant during the calculation, that means it
+can't be modified.
+</para>
+
+<para>
+This saves computing time as no interpolation is done, and is useful for
+recursive structures. See description above (Synth&lowbar;DELAY).
+</para>
+
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-envelopes">
+<title>Envelopes</title>
+
+<para>
+</para>
+
+<sect3 id="mref-synth-envelope-adsr-sect">
+<title>Synth&lowbar;ENVELOPE&lowbar;ADSR</title>
+<anchor id="mref-synth-envelope-adsr" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_ENVELOPE_ADSR.png"
+ format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;ENVELOPE&lowbar;ADSR</phrase></textobject>
+</mediaobject>
+
+<para>
+This is a classic <acronym>ADSR</acronym> envelope which means you
+specify:
+</para>
+
+<variablelist>
+<varlistentry>
+<term>active</term>
+<listitem>
+<para>
+Whether the note is being pressed right now by the user.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>invalue</term>
+<listitem>
+<para>
+The input signal.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>attack</term>
+<listitem>
+<para>
+The time that should pass between the user pressing the note and the signal
+reaching its maximum amplitude (in seconds).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>decay</term>
+<listitem>
+<para>
+The time that should pass between the signal reaching its maximum
+amplitude and the signal going back to some constant level (in seconds).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>sustain</term>
+<listitem>
+<para>
+The constant level the signal is held at afterwards, until the user
+releases the note.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>release</term>
+<listitem>
+<para>
+The time that should pass after the user has released the note until the
+signal is scaled down to zero (in seconds).
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+<para>
+You'll get the scaled signal at outvalue. If the <acronym>ADSR</acronym>
+envelope is finished, it will set done to 1. You can use this to provide
+the <quote>done</quote> output of an instrument (which will make the
+instrument structure be deleted by the &MIDI; router object once the
+release phase is over).
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-pscale-sect">
+<title>Synth&lowbar;PSCALE</title>
+<anchor id="mref-synth-pscale" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_PSCALE.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;PSCALE</phrase></textobject>
+</mediaobject>
+
+<para>
+The Synth&lowbar;PSCALE module will scale the audio stream that is
+directed through it from a volume 0 (silent) to 1 (original loudness)
+back to 0 (silent). According to the position (get the position from
+Synth&lowbar;SEQUENCE). The position where the peak should occur can be
+given as pos.
+</para>
+
+<para>
+Example: Setting top to 0.1 means that after 10&percnt; of the note has
+been played, the volume has reached its maximum, and starts decaying
+afterwards.
+</para>
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-effects">
+<title>Effects</title>
+
+<sect3 id="mref-synth-freeverb-sect">
+<title>Synth&lowbar;FREEVERB</title>
+<anchor id="mref-synth-freeverb" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_FREEVERB.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;FREEVERB</phrase></textobject>
+</mediaobject>
+
+<para>
+This is a reverb effect. In the current implementation, it is thought to
+pass a stereo signal through the reverb, and it will -add- it's reverb
+effect to the signal.
+</para>
+
+<note>
+<para>
+This means that it can be used inside a StereoEffectStack as well.
+</para>
+</note>
+
+<para>
+The input signal should be connected to (inleft, inright), the output
+signal will be (outleft, outright).
+</para>
+
+<para>
+The parameters which you can configure are:
+</para>
+
+<variablelist>
+<varlistentry>
+<term>roomsize</term>
+<listitem>
+<para>
+The size of the room which the reverb simulates (range: 0..1, where 1 is
+the largest possible room).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>damp</term>
+<listitem>
+<para>
+This specifies a filter which will make the simulated room absorb high
+frequencies (range 0..1, where 1 means absorb high frequencies quite
+aggressive).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>wet</term>
+<listitem>
+<para>
+The amount of reverb-signal (that is, the amount of the signal that
+should be modified by the filters, resulting in a <quote>wet</quote>,
+that is <quote>reverb sound</quote>).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>dry</term>
+<listitem>
+<para>
+The amount of pure signal passed through, resulting in an echo (or
+combined delay) rather than reverb effect (range: 0..1).
+</para>
+<!-- TODO: do some measurements to show that this documentation -is- correct,
+I am not sure if it is echo, or really pure (non-delayed), or multiple delay
+or whatever -->
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>width</term>
+<listitem>
+<para>
+The amount of stereo-magic the reverb algorithm adds to the reverb
+effect, making the reverb sound wider in the stereo panorama (range:
+0..1).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>mode</term>
+<listitem>
+<para>
+[ TODO: I think if mode is 1, the reverb holds the current image of the
+sound, whereas 0 is normal operation ]
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+</sect3>
+
+<sect3 id="mref-synth-tremolo-sect">
+<title>Synth&lowbar;TREMOLO</title>
+<anchor id="mref-synth-tremolo" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_TREMOLO.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;TREMOLO</phrase></textobject>
+</mediaobject>
+
+<para>
+The tremolo module modulates the amplitude according to a
+<acronym>LFO</acronym>-Wave. Traditionally you would use a sine wave
+but why limit yourself? What you get is a very intense effect that cuts
+through most arrangements because of its high dynamic range. The
+tremolo effect is still one of guitarists' favorite effects although
+it's not as popular as in the 1960's.
+</para>
+
+<para>
+[ TODO: currently this is implemented as invalue + abs(inlfo) - maybe it
+would make more sense to implement it as invalue * (1+inlfo*depth),
+where depth would be a parameter between 0..1 - decide this after &kde;2.1
+; if you have a comment, send a mail to the &arts; list ;). ]
+</para>
+
+</sect3>
+<sect3 id="mref-synth-fx-cflanger-sect">
+<title>Synth&lowbar;FX&lowbar;CFLANGER</title>
+<anchor id="mref-synth-fx-cflanger" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_FX_CFLANGER.png" format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;FX&lowbar;CFLANGER</phrase></textobject>
+</mediaobject>
+
+<para>
+A flanger is a time-varying delay effect. To make development of complex
+flanger effects simpler, this module is provided, which contains the
+core of a one-channel flanger.
+</para>
+
+<para>It has the following ports:</para>
+
+<variablelist>
+<varlistentry>
+<term>invalue</term>
+<listitem>
+<para>
+The signal which you want to process.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>lfo</term>
+<listitem>
+<para>
+Preferably a sine wave which modulates the delay time inside the
+flanger (-1 .. 1).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>mintime</term>
+<listitem>
+<para>
+The minimum value for the delay inside the flanger in milliseconds.
+Suggested values: try something like 1 ms. Please use values &lt; 1000
+ms.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>maxtime</term>
+<listitem>
+<para>
+The maximum value for the delay inside the flanger in milliseconds.
+Suggested values: try something like 5 ms. Please use values &lt; 1000
+ms.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>outvalue</term>
+<listitem>
+<para>
+The output signal. It is important that you mix that with the
+original (unflanged) signal to get the desired effect.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+<tip>
+<para>
+You can use this as a basis for a chorus effect.
+</para>
+</tip>
+
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-filters">
+<title>Filters</title>
+
+<sect3 id="mref-synth-pitch-shift-sect">
+<title>Synth&lowbar;PITCH&lowbar;SHIFT</title>
+<anchor id="mref-synth-pitch-shift" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_PITCH_SHIFT.png" format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;PITCH&lowbar;SHIFT</phrase></textobject>
+</mediaobject>
+
+<para>
+This pitch shifting effect changes the frequency of the input signal
+without affecting the speed. An application for this is for instance
+changing the pitch of your voice while you record (and replay) it in
+realtime.
+</para>
+
+<para>
+The <emphasis>speed</emphasis> parameter is the relative speed with
+which the signal will be replayed. So a speed of two would make it sound
+twice as high (&ie; an input frequency of 440 Hz would result in an
+output frequency of 880 Hz).
+</para>
+
+<para>
+The <emphasis>frequency</emphasis> parameter is used internally to
+switch between different grains of the signal. It is tunable, and
+depending on your choice, the pitch shifting will sound more or less
+realistic for your use case. A good value to start with is something
+like 5 or 10.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-shelve-cutoff-sect">
+<title>Synth&lowbar;SHELVE&lowbar;CUTOFF</title>
+<anchor id="mref-synth-shelve-cutoff" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_SHELVE_CUTOFF.png" format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;SHELVE&lowbar;CUTOFF</phrase></textobject>
+</mediaobject>
+
+<para>
+Filters out all frequencies over the cutoff frequency.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-brickwall-limiter-sect">
+<title>Synth&lowbar;BRICKWALL&lowbar;LIMITER</title>
+<anchor id="mref-synth-brickwall-limiter" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_BRICKWALL_LIMITER.png"
+ format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;BRICKWALL&lowbar;LIMITER</phrase></textobject>
+</mediaobject>
+
+<para>
+This module clips a signal to make it fit into the range of [-1;1]. It
+doesn't do anything to prevent the distortion that happens when clipping
+loud signals. You can use this as effect (for instance to create a
+slightly clipped sine wave). However, it's probably a good idea to run
+the signal through a lowpass filter afterwards if you do so, to make it
+sound less aggressive.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-std-equalizer-sect">
+<title>Synth&lowbar;STD&lowbar;EQUALIZER</title>
+<anchor id="mref-synth-std-equalizer" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_STD_EQUALIZER.png" format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;STD&lowbar;EQUALIZER</phrase></textobject>
+</mediaobject>
+
+<para>
+This is a nice parametric equalizer building block. Its parameters are:
+</para>
+
+<variablelist>
+<varlistentry>
+<term>invalue, outvalue</term>
+<listitem>
+<para>
+The signal that gets filtered by the equalizer.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>low</term>
+<listitem>
+<para>
+How low frequencies should be changed. The value is in dB, while 0 means
+don't change low frequencies, -6 would mean take them out by 6dB, and +6
+mean boost them by 6dB.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>mid</term>
+<listitem>
+<para>
+How middle frequencies should be changed by the equalizer in dB (see
+low).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>high</term>
+<listitem>
+<para>
+How high frequencies should be changed by the equalizer in dB (see low).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>frequency</term>
+<listitem>
+<para>
+This is the center frequency of the equalizer in Hz, the mid frequencies
+are around that spectrum, the low and high frequencies below and above.
+Note that the frequency may not be higher than half the sampling rate,
+usually that is 22050 Hz, and not lower than 1 Hz.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>q</term>
+<listitem>
+<para>
+This influences how broad the mid spectrum is. It must be a positive
+number &gt; 0. A value of one is reasonable, higher values of q mean a
+narrower spectrum of middle frequencies. Lower values than one mean a
+broader spectrum.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+</sect3>
+
+<sect3 id="mref-synth-rc-sect">
+<title>Synth&lowbar;RC</title>
+<anchor id="mref-synth-rc" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_RC.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;RC</phrase></textobject>
+</mediaobject>
+
+<para>
+A damped resonator filter filtering all frequencies around some peak
+value. There is no useful way of specifying middle frequency (that
+won't be cut), since the input are two strange constants f and b. The
+code is very old, from the first days of the synthesizer, and will
+probably be replaced by a new filter which will have a frequency and a
+resonance value as parameters.
+</para>
+
+<para>
+Try something like b=5, f=5 or b=10, f=10 or b=15, f=15 though.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-moog-vcf-sect">
+<title>Synth&lowbar;MOOG&lowbar;VCF</title>
+<anchor id="mref-synth-moog-vcf" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_MOOG_VCF.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;MOOG&lowbar;VCF</phrase></textobject>
+</mediaobject>
+
+<para>
+Filters out all frequencies over the cutoff frequency (it's a 24db 4pole
+filter, which filters -24db per octave above the cutoff frequency), but
+offers an additional parameter for tuning the filter resonance, while 0
+means no resonance and 4 means self oscillation.
+</para>
+
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-midi-sequencing">
+<title>Midi + Sequencing</title>
+
+<sect3 id="mref-synth-midi-test-sect">
+<title>Synth&lowbar;MIDI&lowbar;TEST</title>
+<anchor id="mref-synth-midi-test" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_MIDI_TEST.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;MIDI&lowbar;TEST</phrase></textobject>
+</mediaobject>
+
+<para>
+This module loads an instrument structure from a file, and registers
+itself as midi output with the &arts; &MIDI; manager. Notes sent to this
+output will result in instrument voices being created.
+</para>
+
+<note>
+<para>
+You can set up something like this more conveniently in &artscontrol; than
+manually in &arts-builder;.
+</para>
+</note>
+
+</sect3>
+
+<sect3 id="mref-synth-sequence-sect">
+<title>Synth&lowbar;SEQUENCE</title>
+<anchor id="mref-synth-sequence" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_SEQUENCE.png"
+format="PNG" /></imageobject></mediaobject>
+
+<para>
+Will play a sequence of notes over and over again. The notes are given
+in tracker notation, and are separated by semicolons. An example is
+<literal>A-3;C-4;E-4;C-4;</literal>. The speed is given as seconds per
+note, so if you want to get 120 bpm, you will probably specify 0.5
+seconds/note, as 60 seconds/0.5 seconds per note=120 bpm.
+</para>
+
+<para>
+You can give each note a length relative to the speed by using a colon
+after the note and then the
+length. <literal>A-3:2;C-4:0.5;D-4:0.5;E-4;</literal> demonstrates
+this. As you see, midi composing programs tend to offer more comfort ;)
+</para>
+
+<para>
+The Synth&lowbar;SEQUENCE gives additional information about the
+position of the note it is playing right now, while 0 means just started
+and 1 means finished. This information you can use with
+Synth&lowbar;PSCALE (see below).
+</para>
+</sect3>
+
+<sect3 id="mref-synth-sequence-freq-sect">
+<title>Synth&lowbar;SEQUENCE&lowbar;FREQ</title>
+<anchor id="mref-synth-sequence-freq" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_SEQUENCE_FREQ.png"
+format="PNG" /></imageobject></mediaobject>
+
+<para>
+This module works just like Synth&lowbar;SEQUENCE with the only difference that
+you don't write notenames but frequencies.
+</para>
+
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-samples">
+<title>Samples</title>
+
+<sect3 id="mref-synth-play-wav-sect">
+<title>Synth&lowbar;PLAY&lowbar;WAV</title>
+<anchor id="mref-synth-play-wav" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_PLAY_WAV.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;PLAY&lowbar;WAV</phrase></textobject>
+</mediaobject>
+
+<para>
+This will play a <literal role="extension">wav</literal> file. It will
+only be present if you have libaudiofile on your computer. The wave file
+will start as soon as the module gets created.
+</para>
+
+<para>
+It will stop as soon as it's over, then finished will be set to 1. The
+speed parameter can be used to replay the file faster or slower, where
+1.0 is the normal (recorded) speed.
+</para>
+<!-- TODO: KDE2.2: check that this really works together in instruments with
+the done parameter things ;) -->
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-soundio">
+<title>Sound IO</title>
+
+<sect3 id="mref-synth-play-sect">
+<title>Synth&lowbar;PLAY</title>
+<anchor id="mref-synth-play" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_PLAY.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;PLAY</phrase></textobject>
+</mediaobject>
+
+<important>
+<para>
+You will normally not need this module, unless you are writing
+standalone applications. Inside &artsd;, there normally is already a
+Synth&lowbar;PLAY module, and creating another one will not work.
+</para>
+</important>
+
+<para>
+The Synth&lowbar;PLAY-module will output your audio signal to the
+soundcard. The left and right channels should contain the
+<emphasis>normalized</emphasis> input for the channels. If your input
+is not between -1 and 1, you get clipping.
+</para>
+
+<para>
+As already mentioned, there may only be one Synth&lowbar;PLAY module
+used, as this one directly accesses your soundcard. Use busses if you
+want to mix more than one audio stream together before playing. Use the
+Synth&lowbar;AMAN&lowbar;PLAY module to get something like an output
+inside &artsd;.
+</para>
+
+<para>
+Note that Synth&lowbar;PLAY also does the timing of the whole
+structure. This means: no Synth&lowbar;PLAY = no source for timing = no
+sound. So you absolutely need (exactly) one Synth&lowbar;PLAY object.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-record-sect">
+<title>Synth&lowbar;RECORD</title>
+<anchor id="mref-synth-record" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_RECORD.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;RECORD</phrase></textobject>
+</mediaobject>
+
+<important>
+<para>You will normally not need this module, unless you are writing
+standalone applications. Inside artsd, there normally is already a
+Synth&lowbar;RECORD module, and creating another one will not work.
+</para>
+</important>
+
+<para>
+The Synth&lowbar;RECORD-module will record a signal from the soundcard.
+The left and right channels will contain the input for the channels
+(between -1 and 1).
+</para>
+
+<para>
+As already mentioned, there may only be one Synth&lowbar;RECORD module
+used, as this one directly accesses your soundcard. Use busses if you
+want to use the recorded audio stream in more than one place. Use the
+Synth&lowbar;AMAN&lowbar;RECORD module to get something like an input
+inside artsd. For this to work, &artsd; must run <emphasis>with full
+duplex enabled </emphasis>.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-aman-play-sect">
+<title>Synth&lowbar;AMAN&lowbar;PLAY</title>
+<anchor id="mref-synth-aman-play" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_AMAN_PLAY.png"
+format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;AMAN&lowbar;PLAY</phrase></textobject>
+</mediaobject>
+
+<para>
+The Synth&lowbar;AMAN&lowbar;PLAY-module will output your audio signal.
+It is nice (but not necessary) if you output a normalized signal
+(between -1 and 1).
+</para>
+
+<para>
+This module will use the audio manager to assign where the signal will
+be played. The audio manager can be controlled through &artscontrol;. To
+make it more intuitive to use, it is good to give the signal you play a
+name. This can be achieved through setting
+<emphasis>title</emphasis>. Another feature of the audio manager is to
+be able to remember where you played a signal the last time. To do so it
+needs to be able to distinguish signals. That is why you should assign
+something unique to <emphasis>autoRestoreID</emphasis>, too.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-aman-record-sect">
+<title>Synth&lowbar;AMAN&lowbar;RECORD</title>
+<anchor id="mref-synth-aman-record" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_AMAN_RECORD.png" format="PNG"/></imageobject>
+<textobject><phrase>Synth&lowbar;AMAN&lowbar;RECORD</phrase></textobject>
+</mediaobject>
+
+<para>
+The Synth&lowbar;AMAN&lowbar;RECORD-module will record an audio signal
+from an external source (&ie; line in/microphone) within &artsd;. The
+output will be a normalized signal (between -1 and 1).
+</para>
+
+<para>
+This module will use the audio manager to assign where the signal will
+be played. The audio manager can be controlled through artscontrol. To
+make it more intuitive to use, it is good to give the signal you record
+a name. This can be achieved through setting
+<emphasis>title</emphasis>. Another feature of the audio manager is to
+be able to remember where you recorded a signal the last time. To do so
+it needs to be able to distinguish signals. That is why you should
+assign something unique to <emphasis>autoRestoreID</emphasis>, too.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-capture-sect">
+<title>Synth&lowbar;CAPTURE</title>
+<anchor id="mref-synth-capture" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_CAPTURE.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;CAPTURE</phrase></textobject>
+</mediaobject>
+
+<para>
+The Synth&lowbar;CAPTURE-module will write an audio signal to a wave
+file on your hard disc. The file will always be called
+<filename>/tmp/mcop-<replaceable>username</replaceable>/capture.wav</filename>
+</para>
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-tests">
+<title>Tests</title>
+
+<sect3 id="mref-synth-nil-sect">
+<title>Synth&lowbar;NIL</title>
+<anchor id="mref-synth-nil" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_NIL.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;NIL</phrase></textobject>
+</mediaobject>
+
+<para>
+This just does nothing. It is only useful for test situations.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-debug-sect">
+<title>Synth&lowbar;DEBUG</title>
+<anchor id="mref-synth-debug" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_DEBUG.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;DEBUG</phrase></textobject>
+</mediaobject>
+
+<para>
+You can use this for debugging. It will print out the value of the
+signal at invalue in regular intervals (ca. 1 second), combined with the
+comment you have specified. That way you can find out if some signals
+stay in certain ranges, or if they are there at all.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-midi-debug-sect">
+<title>Synth&lowbar;MIDI&lowbar;DEBUG</title>
+<anchor id="mref-synth-midi-debug" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_MIDI_DEBUG.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;MIDI&lowbar;DEBUG</phrase></textobject>
+</mediaobject>
+
+<para>
+You can use this to debug how your &MIDI; events are actually arriving
+in &arts;.
+</para>
+
+<para>
+When a MIDI&lowbar;DEBUG is running, &artsserver; will print out lines
+like:
+</para>
+
+<screen><computeroutput>201 100753.837585 on 0 42 127</computeroutput></screen>
+
+<screen><computeroutput>202 101323.128355 off 0 42</computeroutput></screen>
+
+<para>
+While the first line would be telling you that 100753ms (that is 100
+seconds) after the MIDI&lowbar;DEBUG started, a &MIDI; on event arrived
+on channel 0. This midi on event had the velocity (volume) of 127, the
+loudest possible. The next line shows the midi release event. [ TODO:
+this does not work currently, make it work, and do it via &MIDI; manager
+].
+</para>
+</sect3>
+
+<sect3 id="mref-synth-data-sect">
+<title>Synth&lowbar;DATA</title>
+<anchor id="mref-synth-data" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_DATA.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;DATA</phrase></textobject>
+</mediaobject>
+
+<para>
+This creates a signal with a constant number.
+</para>
+<!-- TODO: this doesn't really belong in test, does it? -->
+</sect3>
+</sect2>
+
+<sect2 id="mcat-synth-osc-mod">
+<title>Oscillation &amp; Modulation</title>
+
+<sect3 id="mref-synth-frequency-sect">
+<title>Synth&lowbar;FREQUENCY</title>
+<anchor id="mref-synth-frequency" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_FREQUENCY.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;FREQUENCY</phrase></textobject>
+</mediaobject>
+
+<para>
+The oscillators in &arts; do not take a frequency as input, but a
+position in the wave. The position should be between 0 and 1, which maps
+for a standard Synth&lowbar;WAVE&lowbar;SIN object to the range
+0..2*pi. To generate oscillating values from a frequency, a
+Synth&lowbar;FREQUENCY module is used.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-fm-source-sect">
+<title>Synth&lowbar;FM&lowbar;SOURCE</title>
+<anchor id="mref-synth-fm-source" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_FM_SOURCE.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;FM&lowbar;SOURCE</phrase></textobject>
+</mediaobject>
+
+<para>
+This is used for frequency modulation. Put your frequency to the
+frequency input and put another signal on the modulator input. Then set
+modlevel to something, say 0.3. The frequency will be modulated with
+modulator then. Just try it. Works nice when you put a feedback in
+there, that means take a combination of the delayed output signal from
+the Synth&lowbar;FM&lowbar;SOURCE (you need to put it to some oscillator
+as it only takes the role of Synth&lowbar;FREQUENCY) and some other
+signal to get good results.
+</para>
+
+<para>
+Works nicely in combination with Synth&lowbar;WAVE&lowbar;SIN
+oscillators.
+</para>
+</sect3>
+
+</sect2>
+
+<sect2 id="mcat-synth-waveforms">
+<title>Wave Forms</title>
+
+<sect3 id="mref-synth-wave-sin-sect">
+<title>Synth&lowbar;WAVE&lowbar;SIN</title>
+<anchor id="mref-synth-wave-sin" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_WAVE_SIN.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;WAVE&lowbar;SIN</phrase></textobject>
+</mediaobject>
+
+<para>
+Sine oscillator. Put a pos signal from Synth&lowbar;FREQUENCY or
+Synth&lowbar;FM&lowbar;SOURCE at the input. And get a sine wave as
+output. The pos signal specifies the position in the wave, the range
+0..1 is mapped to 0..2*pi internally.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-wave-tri-sect">
+<title>Synth&lowbar;WAVE&lowbar;TRI</title>
+<anchor id="mref-synth-wave-tri" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_WAVE_TRI.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;WAVE&lowbar;TRI</phrase></textobject>
+</mediaobject>
+
+<para>
+Triangle oscillator. Put a pos signal from Synth&lowbar;FREQUENCY or
+Synth&lowbar;FM&lowbar;SOURCE at the input. And get a triangle wave as
+output. The pos signal specifies the position in the wave, the range
+0..1 is mapped to 0..2*pi internally. Be careful. The input signal
+<emphasis>must</emphasis> be in the range 0..1 for the output signal to
+produce good results.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-noise-sect">
+<title>Synth&lowbar;NOISE</title>
+<anchor id="mref-synth-noise" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_NOISE.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;NOISE</phrase></textobject>
+</mediaobject>
+
+<para>
+Noise generator. This generates a random signal between -1 and 1.
+</para>
+
+</sect3>
+
+<sect3 id="mref-synth-wave-square-sect">
+<title>Synth&lowbar;WAVE&lowbar;SQUARE</title>
+<anchor id="mref-synth-wave-square" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_WAVE_SQUARE.png" format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;WAVE&lowbar;SQUARE</phrase></textobject>
+</mediaobject>
+
+<para>
+Square oscillator. Put a pos signal from Synth&lowbar;FREQUENCY or
+Synth&lowbar;FM&lowbar;SOURCE at the input. And get a square wave as
+output. The pos signal specifies the position in the wave, the range
+0..1 is mapped to 0..2*pi internally. Be careful. The input signal
+<emphasis>must</emphasis> be in the range 0..1 for the output signal to
+produce good results.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-wave-softsaw-sect">
+<title>Synth&lowbar;WAVE&lowbar;SOFTSAW</title>
+<anchor id="mref-synth-wave-softsaw" />
+
+<mediaobject><imageobject><imagedata
+fileref="images/Synth_WAVE_SOFTSAW.png" format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;WAVE&lowbar;SOFTSAW</phrase></textobject>
+</mediaobject>
+
+<para>
+Softened saw wave, similar in appearance to the Synth&lowbar;WAVE&lowbar;TRI
+oscillator. Put a pos signal from Synth&lowbar;FREQUENCY or
+Synth&lowbar;FM&lowbar;SOURCE at the input. You'll get a softened saw
+wave as output. The pos signal specifies the position in the wave, the
+range 0..1 is mapped to 0..2*pi internally. Be careful. The input signal
+<emphasis>must</emphasis> be in the range 0..1 for the output signal to
+produce good results.
+</para>
+</sect3>
+
+<sect3 id="mref-synth-wave-pulse-sect">
+<title>Synth&lowbar;WAVE&lowbar;PULSE</title>
+<anchor id="mref-synth-wave-pulse" />
+
+<mediaobject><imageobject><imagedata fileref="images/Synth_WAVE_PULSE.png"
+format="PNG" /></imageobject>
+<textobject><phrase>Synth&lowbar;WAVE&lowbar;PULSE</phrase></textobject>
+</mediaobject>
+
+<para>
+Pulse oscillator - this module is similar in spirit to the rectangular
+oscillator (Synth&lowbar;WAVE&lowbar;RECT), but it provides a configurable up/down
+ratio, through the <emphasis>dutycycle</emphasis> parameter. Put a pos
+signal from Synth&lowbar;FREQUENCY or Synth&lowbar;FM&lowbar;SOURCE at
+the input. Get a pulse wave as output. The pos signal specifies the
+position in the wave, the range 0..1 is mapped to 0..2*pi internally. Be
+careful. The input signal <emphasis>must</emphasis> be in the range 0..1
+for the output signal to produce good results.
+</para>
+</sect3>
+</sect2>
+<sect2 id="mcat-synth-misc">
+<title>Miscellaneous</title>
+
+<sect3 id="mref-synth-compressor-sect">
+<title>Synth&lowbar;COMPRESSOR</title>
+<anchor id="mref-synth-compressor" />
+
+<mediaobject>
+<imageobject><imagedata fileref="images/Synth_COMPRESSOR.png"
+ format="PNG"/></imageobject></mediaobject>
+
+<para>
+This module reduces the dynamic range of the signal. For example,
+compressors are useful in compensating for the wide variations in
+loudness of somebody talking into a microphone.
+</para>
+
+<para>
+As soon as the input level exceeds a certain level (the threshold)
+the signal gets compressed. It simply multiplies everything above
+the threshold with the ratio, which should be a number between 0 and
+1. Finally the whole signal is multiplied by the output factor.
+</para>
+
+<para>
+The attack and release arguments delay the start and end of the
+compression. Use this if you, for example, still want to hear the
+loud beginning of a bass drum. The argument is in milliseconds and an
+attack or release of 0ms is possible but may result in a slight noise.
+</para>
+
+</sect3>
+</sect2>
+</sect1>
+
+<sect1 id="visual-modules-reference">
+<title>Visual Modules Reference</title>
+
+<para>
+TODO when visual modules are more "finished".
+</para>
+</sect1>
+
+</chapter>
diff --git a/doc/artsbuilder/porting.docbook b/doc/artsbuilder/porting.docbook
new file mode 100644
index 00000000..f039904e
--- /dev/null
+++ b/doc/artsbuilder/porting.docbook
@@ -0,0 +1,64 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="porting">
+<title>Porting Applications to &arts;</title>
+
+<sect1 id="using-artsdsp">
+<title>Using &artsdsp;</title>
+
+<para>
+The &artsdsp; utility, <link linkend="artsdsp">described
+previously</link>, allows most legacy sound applications that talk to
+the audio devices directly, to work properly under &arts;. Applications
+written to use the Enlightenment Sound Daemon
+(<application>esd</application>) will also work in most cases by running
+<application>esd</application> under &artsdsp;.
+</para>
+
+<para>
+This makes a good short term solution to porting existing applications
+to &kde;. However, it does not allow the application to directly take
+advantage of all of the power of &arts;, such as using modules and
+multimedia streams other than digital audio. If the application goes
+beyond simple playing of sound files, it usually makes sense to add
+native support for &arts; to the application.
+</para>
+
+<para>
+Using &arts; also means that the application does not have to do as much
+work -- it can leverage the functions in &arts; to handle issues like
+codecs for different media formats and control of the sound hardware.
+</para>
+
+</sect1>
+
+<sect1 id="adding-native-arts-support">
+<title>Adding Native &arts; support</title>
+
+<para>
+When using &arts;, you have a number of different <link
+linkend="arts-apis"><acronym>API</acronym>s</link> to choose from. The
+decision of which to use depends on a number of factors, including what
+type of streaming media is used (sound, &MIDI;, &CD; audio, &etc;), the
+<acronym>API</acronym> features required, and whether it is written in
+C++. In most cases the choice should be relatively obvious based on the
+required features.
+</para>
+
+<para>
+For cross-platform portability, applications that need to run on
+environments other than &kde; cannot rely on &arts; being present. Using
+the plug-ins paradigm is a good way to support different multimedia
+environments. Making the plug-in <acronym>API</acronym> open and
+documented (especially for closed source applications) also has the
+advantage of allowing someone other than the application developer to
+implement an &arts; plug-in.
+</para>
+
+</sect1>
+
+</chapter>
+
diff --git a/doc/artsbuilder/references.docbook b/doc/artsbuilder/references.docbook
new file mode 100644
index 00000000..4978f723
--- /dev/null
+++ b/doc/artsbuilder/references.docbook
@@ -0,0 +1,56 @@
+<!-- <?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="references">
+<title>References</title>
+
+<variablelist>
+
+<varlistentry>
+<term><ulink
+url="http://multimedia.kde.org">http://multimedia.kde.org</ulink></term>
+<listitem>
+<para>
+This is the primary web site for &kde;-related multimedia information.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><ulink
+url="http://www.arts-project.org">http://www.arts-project.org</ulink></term>
+<listitem>
+<para>
+This is the home page for the &arts; project.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>&kde; 2.0 Development</term>
+<listitem>
+<para>
+Chapter 14 of this published book covers multimedia, including
+&arts;. It is available in print or on-line with annotations at <ulink
+url="http://www.andamooka.org/">http://www.andamooka.org</ulink>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term>
+<ulink
+url="http://sound.condorow.net">http://sound.condorow.net</ulink></term>
+<listitem>
+<para>
+This site has a comprehensive listing of sound and &MIDI; applications
+for &Linux;.
+</para>
+</listitem>
+</varlistentry>
+
+</variablelist>
+
+</chapter>
diff --git a/doc/artsbuilder/tools.docbook b/doc/artsbuilder/tools.docbook
new file mode 100644
index 00000000..417951df
--- /dev/null
+++ b/doc/artsbuilder/tools.docbook
@@ -0,0 +1,735 @@
+<!--
+<?xml version="1.0" ?>
+<!DOCTYPE chapter PUBLIC "-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN" "dtd/kdex.dtd">
+
+To validate or process this file as a standalone document, uncomment
+this prolog. Be sure to comment it out again when you are done -->
+
+<chapter id="arts-tools">
+<title>&arts; Tools</title>
+
+<para>
+Included with &arts; is a number of utilities for controlling and
+configuring its behavior. You need to have some familiarity with most of
+these tools in order to use &arts; effectively. This section describes
+each of the utilities and their command options.
+</para>
+
+<sect1 id="kde-control-center">
+<title>&kcontrol;</title>
+
+<para>
+When running &arts; under &kde;, the &kcontrolcenter; provides a group
+of control panel settings under the <guilabel>Sound</guilabel>
+category. Some of these settings are used by &arts;. You can also
+associate sounds with various window manager and &kde; events using the
+<menuchoice><guilabel>Look &amp; Feel</guilabel><guilabel>System
+Notifications</guilabel></menuchoice> panel. See the &kcontrol; manual
+for information on using the panel settings.
+</para>
+
+</sect1>
+
+<sect1 id="artsd">
+<title>&artsd;</title>
+
+<para>
+Access to the sound hardware resources is controlled by &artsd;, the
+&arts; daemon. This allows different applications to simultaneously send
+requests to the server, where they can be mixed together and
+played. Without a centralized sound server a single application using a
+sound device would prevent other applications from using it.
+</para>
+
+<para>
+To use &arts; there should be one and only one copy of &artsd;
+running. It is typically run when &kde; starts up if it is enabled in
+the &kcontrol; <guilabel>Sound Server</guilabel> panel.
+</para>
+
+<para>The program accepts the following arguments:</para>
+
+<!-- LW: FIX THIS -->
+
+<cmdsynopsis>
+<command>artsd</command>
+<group choice="opt">
+<option>-n</option>
+<option>-p</option>
+<option>-N</option>
+<option>-w <replaceable>n</replaceable></option>
+
+</group>
+<group choice="opt">
+<option>-a <replaceable>audiomethod</replaceable></option>
+<option>-r <replaceable>sampling rate</replaceable></option>
+<option>-b <replaceable>bits</replaceable></option>
+<option>-d</option>
+<option>-D <replaceable>devicename</replaceable></option>
+<option>-F <replaceable>fragments</replaceable></option>
+<option>-S <replaceable>size</replaceable></option>
+<option>-s <replaceable>seconds</replaceable></option>
+<option>-m <replaceable>appName</replaceable></option>
+</group>
+<group choice="opt">
+<option>-h</option>
+<option>-A</option>
+<option>-v</option>
+<option>-l <replaceable>level</replaceable></option>
+</group>
+</cmdsynopsis>
+
+<variablelist><varlistentry>
+<term><option>-r <replaceable>sampling rate</replaceable></option></term>
+<listitem>
+<para>Set sampling rate to use.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-h</option></term>
+<listitem>
+<para>Display command usage.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-n</option></term>
+<listitem>
+<para>Enable network transparency.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-p <replaceable>port</replaceable></option>
+</term>
+<listitem>
+<para>Set <acronym>TCP</acronym> port to use (implies
+<option>-n</option>).</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-u</option></term>
+<listitem>
+<para>Public, no authentication (dangerous).</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-d</option></term>
+<listitem>
+<para>Enable full duplex operation.</para>
+</listitem>
+</varlistentry>
+<varlistentry>
+<term><option>-D <replaceable>device name</replaceable></option></term>
+<listitem>
+<para>Specify audio device (usually <filename>/dev/dsp</filename>).</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-F <replaceable>fragments</replaceable></option></term>
+<listitem>
+<para>Set number of fragments.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-S <replaceable>size</replaceable></option></term>
+<listitem>
+<para>Set fragment size, in bytes.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-s <replaceable>seconds</replaceable></option></term>
+<listitem>
+<para>Set server auto-suspend time, in seconds. A value of zero
+disables auto-suspend.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-m <replaceable>appName</replaceable></option></term>
+<listitem>
+<para>Specify the name of an application to be used to display error,
+warning, and informational messages. If you are running &kde; you can
+use the <application>artsmessage</application> utility for this.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-N</option></term>
+<listitem>
+<para>
+Increase the size of network buffers to a value suitable for running over
+a 10 Mbps LAN. This is equivalent to using the <option>-w 5</option> option (see below).
+</para>
+</listitem>
+</varlistentry>
+<varlistentry>
+<term><option>-w <replaceable>n</replaceable></option></term>
+<listitem>
+<para>
+When running <application>artsd</application> over a network connection
+to another host you typically want to use a larger buffer size to
+avoid dropouts. &arts; provides applications with a suggested minimum
+buffer size. Without this option the default size is based on the
+fragment size * fragment count. Using this option you can increase
+the size from the default by a factor of <replaceable>n</replaceable>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-l <replaceable>level</replaceable></option></term>
+<listitem>
+<para>Set information level - 3 (quiet), 2 (warnings), 1 (info), 0
+(debug).</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-v</option></term>
+<listitem>
+<para>Display version level.</para>
+</listitem>
+</varlistentry>
+
+</variablelist>
+
+<para>
+In most cases simply running &artsd; will suffice.
+</para>
+</sect1>
+
+<sect1 id="artswrapper">
+<title>&artswrapper;</title>
+
+<para>
+To provide good real-time response, &artsd; is usually run as a
+real-time process (on platforms where real-time priorities are
+supported). This requires <systemitem class="username">root</systemitem>
+permissions, so to minimize the security implications, &artsd; can be
+started using the small wrapper program &artswrapper; which simply sets
+real-time priority (running as <systemitem
+class="username">root</systemitem>) and then executes &artsd; as a
+non-<systemitem class="username">root</systemitem> user.
+</para>
+
+<para>If you make artswrapper SUID <systemitem
+class="username">root</systemitem>, it will likely improve the quality
+of your audio playback by reducing gaps in the music. However, it
+also increases the risk that a bug in the code or a malicious user can
+crash or otherwise harm your machine. In addition, on multi-user
+machines, prioritizing high-quality audio may result in deteriorated
+performance for the users who are trying to make
+<quote>productive</quote> use of the machine.</para>
+
+</sect1>
+
+<sect1 id="artsshell">
+<title>&artsshell;</title>
+
+<para>
+The &artsshell; command is intended as a utility to perform
+miscellaneous functions related to the sound server. It is expected that
+the utility will be extended with new commands in the future (see the
+comments in the source code for some ideas).
+</para>
+
+<para>
+The command accepts the following format:
+</para>
+
+<!-- LW: FIX THIS -->
+
+<cmdsynopsis>
+<command>artsshell</command>
+<group>
+<arg>suspend</arg><arg>status</arg>
+<arg>terminate</arg>
+<arg>autosuspend <replaceable>secs</replaceable></arg>
+<arg>networkbuffers <replaceable>n</replaceable></arg>
+<arg>volume [<replaceable>volume</replaceable>]</arg>
+<arg>stereoeffect <replaceable>options</replaceable></arg>
+</group>
+<group>
+<option>-h</option>
+<option>-q</option>
+</group>
+</cmdsynopsis>
+
+<para>artsshell [options] <replaceable>command</replaceable> [<replaceable>command-options</replaceable>] </para>
+
+<para>
+The following options are supported:
+</para>
+
+<variablelist>
+
+<varlistentry>
+<term><option>-q</option></term>
+<listitem>
+<para>Suppress all output.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-h</option></term>
+<listitem>
+<para>Display command usage.</para>
+</listitem>
+</varlistentry>
+
+</variablelist>
+
+<para>The following commands are supported:</para>
+
+<variablelist>
+
+<varlistentry>
+<term><option>suspend</option></term>
+<listitem>
+<para>
+Suspend the sound server.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>status</option></term>
+<listitem>
+<para>Display sound server status information.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>terminate</option></term>
+<listitem>
+<para>
+Terminate the sound server. This may confuse and/or crash any
+applications that are currently using it.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>autosuspend</option> <parameter>seconds</parameter></term>
+<listitem>
+<para>
+Set the autosuspend time to the specified number of seconds. The sound
+server will suspend itself if idle for that period of time. A value of
+zero disables auto-suspend.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>networkbuffers</option> <parameter>n</parameter></term>
+<listitem>
+<para>
+Set the size of the network buffers to be a factor of
+<parameter>n</parameter> times the default size.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>volume</option> [<replaceable>volume</replaceable>]</term>
+<listitem>
+<para>
+Sets volume scaling for sound server audio output. The
+<replaceable>volume</replaceable> argument is a floating point
+value. With no argument the current volume is displayed.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>stereoeffect list</option></term>
+<listitem>
+<para>List all of the available stereo effect modules.</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>stereoeffect insert [top|bottom]</option> <replaceable>name</replaceable></term>
+<listitem>
+<para>Insert a stereo effect into the stereo effect stack. Returns
+an identifier that can be used for later removing it. It can be
+installed at the top or the bottom (the default).</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>stereoeffect remove</option> <replaceable>id</replaceable></term>
+<listitem>
+<para>Removes the stereo effect with identifier
+<replaceable>id</replaceable> from the effects stack.</para>
+</listitem>
+</varlistentry>
+
+</variablelist>
+
+</sect1>
+
+<sect1 id="artsplay">
+<title><application>artsplay</application></title>
+
+<para>The <application>artsplay</application> command is a simple utility to
+play a sound file. It accepts a single argument corresponding to the name of a
+sound file which is sent to the sound server to be played. The sound
+file can be any common sound file type such as <literal
+role="extension">wav</literal> or <literal
+role="extension">au</literal>. This utility is good for testing that the
+sound server is working. By running two commands in parallel or in rapid
+succession you can demonstrate how the sound server mixes more than one
+sound source.</para>
+
+</sect1>
+
+<sect1 id="artsdsp">
+<title><application>artsdsp</application></title>
+
+<para>
+The sound server only supports applications that are &arts;-aware. Many
+legacy applications want to access the sound device directly. The
+&artsdsp; command provides an interim solution that
+allows most of these applications to run unchanged.
+</para>
+
+<para>
+When an application is run under &artsdsp; all accesses to the <filename
+class="devicefile">/dev/dsp</filename> audio device are intercepted and
+mapped into &arts; <acronym>API</acronym> calls. While the device
+emulation is not perfect, most applications work this way, albeit with
+some degradation in performance and latency.
+</para>
+
+<para>The &artsdsp; command follows the format:
+</para>
+
+<!-- LW: FIX THIS -->
+<para>
+artsdsp [<replaceable>options</replaceable>] <replaceable>application arguments</replaceable>
+</para>
+
+<para>
+The following options are recognized:
+</para>
+
+<variablelist>
+
+<varlistentry>
+<term><option>-h</option>, <option>--help</option></term>
+<listitem>
+<para>Show brief help.</para>
+</listitem>
+</varlistentry>
+<varlistentry>
+<term><option>-n</option>, <option>--name</option>=<replaceable>name</replaceable></term>
+<listitem>
+<para>Use <replaceable>name</replaceable> to identify player to <command>artsd</command>.</para>
+
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-m</option> <option>--mmap</option></term>
+<listitem>
+<para>Emulate memory mapping (&eg; for <application>Quake</application>).</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-v</option> <option>--verbose</option></term>
+<listitem>
+<para>Show parameters.</para>
+</listitem>
+</varlistentry>
+
+</variablelist>
+
+<para>
+A typical invocation is:
+</para>
+
+<para>
+<userinput><command>artsdsp</command> <option>-v</option> <option>-m</option> <parameter>realplay <replaceable>song.mp3</replaceable></parameter></userinput>
+</para>
+
+<para>
+Some applications work better with the <option>--mmap</option>
+option. Not all features of the sound device are fully emulated, but
+most applications should work. If you find one that does not, submit a
+detailed bug report and the developers may be able to fix it. Again,
+remember this is an interim solution and something of an ugly hack; the
+best solution is to add native &arts; support to the applications. If
+your favorite sound application does not have &arts; support, ask the
+developer to provide it.
+</para>
+
+</sect1>
+
+<sect1 id="artscat">
+<title><application>artscat</application></title>
+
+<para>
+This is a simple utility to send raw audio data to the sound server.
+You need to specify the data format (sampling rate, sample size, and
+number of channels). This is probably not a utility that you will use
+often, but it can be handy for testing purposes. The command syntax is:
+</para>
+
+<!-- LW: FIX THIS -->
+
+<para>
+artscat [ <replaceable>options</replaceable> ] [ <replaceable>filename</replaceable> ]
+</para>
+
+<para>
+If no file name is specified, it reads standard input. The following
+options are supported:
+</para>
+
+<variablelist>
+<varlistentry>
+<term><option>-r</option> <parameter>sampling
+rate</parameter></term>
+<listitem>
+<para>
+Set the sampling rate to use.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-b</option> <parameter>bits</parameter></term>
+<listitem>
+<para>
+Set sample size to use (8 or 16).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-c</option> <parameter>channels</parameter></term>
+<listitem>
+<para>
+Set number of channels (1 or 2).
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-h</option></term>
+<listitem>
+<para>
+Display command usage and exit.
+</para>
+</listitem>
+</varlistentry>
+
+</variablelist>
+</sect1>
+
+<sect1 id="artscontrol">
+<title>&artscontrol;</title>
+
+<para>
+This is a graphical utility for performing a number of tasks related to
+the sound server. The default window displays two volume level
+indicators and a slider to control overall output volume. From the
+<guimenu>View</guimenu> menu you can select other functions:
+</para>
+
+<variablelist>
+
+<varlistentry>
+<term><guimenuitem>FFT Scope</guimenuitem></term>
+<listitem>
+<para>
+Opens a window which shows a real-time spectrum analyzer style display.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><guimenuitem>Audio Manager</guimenuitem></term>
+<listitem>
+<para>
+Displays active sound sources and allows you to connect them to any of
+the available busses.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><guimenuitem>aRts Status</guimenuitem></term>
+<listitem>
+<para>
+Shows if the sound server is running and if scheduling is
+real-time. Indicates when server will autosuspend and allows you to
+suspend it immediately.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><guimenuitem>Midi Manager</guimenuitem></term>
+<listitem>
+<para>
+Shows active &MIDI; inputs and outputs and allows you to make connections
+[TODO: Does this work yet? Need more detail].
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><guimenuitem>FreeVerb</guimenuitem></term>
+<listitem>
+<para>
+Connects a FreeVerb reverb effect to the stack of &arts; output effects
+and allows you to control the effect parameters graphically.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><guimenuitem>Leds-like volume display</guimenuitem></term>
+<listitem>
+<para>
+Changes the volume indicators in the main window to use a colored
+<acronym>LED</acronym> display format instead of progress bars.
+</para>
+</listitem>
+</varlistentry>
+
+</variablelist>
+
+</sect1>
+
+<sect1 id="artsc-config">
+<title><application>artsc-config</application></title>
+
+<para>
+This is a utility to assist developers using the &arts; C
+<acronym>API</acronym>. It outputs the appropriate compiler and linker
+options needed when compiling and linking code with &arts;. It is
+intended to be used within make files to assist in portability. The
+command accepts three options:
+</para>
+
+<variablelist>
+<varlistentry>
+<term><option>--cflags</option></term>
+<listitem>
+<para>
+Displays the compiler flags needed when compiling with the &arts; C
+<acronym>API</acronym>.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>--libs</option></term>
+<listitem>
+<para>
+Displays the linker flags needed when linking with the &arts; C
+<acronym>API</acronym>.
+</para>
+</listitem>
+</varlistentry>
+<varlistentry>
+<term><option>--version</option></term>
+<listitem>
+<para>
+Displays the version of the <command>artsc-config</command> command.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+<para>Typical output from the command is shown below:</para>
+
+<screen width="40">
+<prompt>%</prompt> <userinput><command>artsc-config</command> <option>--cflags</option></userinput>
+<computeroutput>-I/usr/local/kde2/include/artsc</computeroutput>
+<prompt>%</prompt> <userinput><command>artsc-config</command> <option>--libs</option></userinput>
+<computeroutput>-L/usr/local/kde2/lib -ldl -lartsc -DPIC -fPIC -lpthread</computeroutput>
+<prompt>%</prompt> <userinput><command>artsc-config</command> <option>--version</option></userinput>
+<computeroutput>0.9.5</computeroutput>
+</screen>
+
+<para>
+You could use this utility in a make file using a rule such as:
+</para>
+
+<programlisting>
+artsc: artsc.c
+ gcc `artsc-config --cflags` -o artsc artsc.c `artsc-config --libs`
+</programlisting>
+
+</sect1>
+
+<sect1 id="mcopidl">
+<title>&mcopidl;</title>
+
+<para>
+The &mcopidl; command is the &IDL; file compiler for &MCOP;, the
+Multimedia Communication Protocol used by &arts;. Interfaces in &arts;
+are defined in &IDL;, a language independent Interface Definition
+Language. The &mcopidl; utility accepts an &IDL; file as input and
+generates C++ header and source files for a class implementing the
+interface. The command accepts the following syntax:
+</para>
+
+<!-- LW: FIX THIS -->
+
+<para>mcopidl [ <replaceable>options</replaceable> ] <replaceable>filename</replaceable>
+</para>
+
+<para>The valid options are:</para>
+<variablelist>
+<varlistentry>
+<term><option>-I</option> <parameter>directory</parameter></term>
+<listitem>
+<para>
+Search in <parameter>directory</parameter> for includes.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-e</option> <parameter>name</parameter></term>
+<listitem>
+<para>
+Exclude a struct, interface, or enum type <parameter>name</parameter>
+from code generation.
+</para>
+</listitem>
+</varlistentry>
+
+<varlistentry>
+<term><option>-t</option></term>
+<listitem>
+<para>
+Also create <literal role="extension">.mcoptype</literal>/<literal
+role="extension">.mcopclass</literal> files containing type information
+for the &IDL; file.
+</para>
+</listitem>
+</varlistentry>
+</variablelist>
+
+<para>
+More information about &MCOP; and &IDL; is covered in the section <link
+linkend="interfaces">Interfaces and &IDL;</link>.
+</para>
+
+</sect1>
+
+</chapter>