Compare commits
231 Commits
eric-proto
...
luke_tests
Author | SHA1 | Date | |
---|---|---|---|
0eca86107e | |||
0de953a3ef | |||
f3b20b809d | |||
d159b921eb | |||
4e4cf46b16 | |||
6b61533650 | |||
419885485b | |||
0c8504b6db | |||
654c34a1a6 | |||
a6c8d35491 | |||
366264828e | |||
d3a6ff6043 | |||
5ca84b91dc | |||
87935c6678 | |||
92980508c0 | |||
65880db422 | |||
767d9c7804 | |||
2566377e2a | |||
1fbbdd43f9 | |||
8cdfbb379e | |||
0a55e26520 | |||
68a8509c12 | |||
5f98d2ed5c | |||
ef1be141c1 | |||
bbfd40d56d | |||
5d08da46a2 | |||
66045acf1d | |||
6977c4315c | |||
839b3000a8 | |||
ffa4557928 | |||
003cd11362 | |||
c87227b65a | |||
c07e75f2ac | |||
6b394a5cf7 | |||
2416b8ccd8 | |||
b9921e111d | |||
8f5aa8841d | |||
900d67de87 | |||
22e3c3eeed | |||
4ead37ee48 | |||
bce360bbd5 | |||
2a68cb26f8 | |||
043e70ff49 | |||
66021367f6 | |||
e69f44ae3d | |||
1941a3785e | |||
c7b8db6a2e | |||
1dc4a3be49 | |||
09597016fd | |||
1a2fa0923c | |||
00f2f2cc51 | |||
7a88ca619a | |||
eddd19a245 | |||
2aafecc3b5 | |||
094b2371e9 | |||
c1034d300d | |||
02d67c257f | |||
e04ea85d1e | |||
527d3555f0 | |||
71492ad229 | |||
73d64e5aff | |||
4cb52e1632 | |||
9d042f7bd0 | |||
5ec2d8842e | |||
04c9470f7d | |||
be05c889e2 | |||
04e3a7b5a9 | |||
d8107533d8 | |||
cd4e65e190 | |||
375bfcce8a | |||
c225cfd8b6 | |||
6fe4c6134a | |||
966b1325ed | |||
019dfc01b4 | |||
45c9c5075c | |||
20b97c972b | |||
efc4a4a3c6 | |||
ce6ea89c21 | |||
4207db7c17 | |||
1cd35b3359 | |||
f51a6df5b2 | |||
94417e1330 | |||
2e17dbce55 | |||
a9eed1f5d9 | |||
2dfab3f378 | |||
e583098a96 | |||
b926ca3997 | |||
aacfefad26 | |||
edc4fd7b3e | |||
ff6c27acf2 | |||
69856f18dd | |||
2c084c8cf7 | |||
58ac860cc7 | |||
d44117d625 | |||
6bd42132c4 | |||
4085904362 | |||
b04b4b5902 | |||
383e738ab7 | |||
3681cecb4d | |||
545ed8111a | |||
86071d30c9 | |||
21d1b2ed3e | |||
6157633137 | |||
a08def47e6 | |||
fc5a12bad7 | |||
e323d8e702 | |||
b350ea0393 | |||
8e9e21a02e | |||
4df313c54a | |||
ffc68d448f | |||
a8f03248a8 | |||
cfaf069095 | |||
a6144eabe2 | |||
c139367555 | |||
1586956d57 | |||
b45b496b0e | |||
2cf10cb6f0 | |||
5a5516303d | |||
d6a99bafea | |||
08f2657e18 | |||
6aa2f92930 | |||
87044b41d8 | |||
6ab1c83302 | |||
e371248567 | |||
dbf9463238 | |||
eb12af4da4 | |||
e8417b4f6f | |||
6579566c6e | |||
410e2e02a0 | |||
8ce4f264aa | |||
d885eb48ff | |||
661fe3606a | |||
c4b7e579cf | |||
f6ec8ba963 | |||
0e3d6ed2d7 | |||
01bfc14503 | |||
65c1b1e8b2 | |||
b37bd8f21c | |||
05e3956a0c | |||
9b066f4327 | |||
456551ffeb | |||
ecb9d97adb | |||
0eaa5aa784 | |||
21923e213d | |||
6877fc4892 | |||
dbed3d9d1d | |||
fc8bf16769 | |||
e114a3c9cb | |||
ebc9f6f512 | |||
2b3f03ec28 | |||
8fd9c06be2 | |||
b553eef781 | |||
ee4c513fd4 | |||
c013e6ad33 | |||
c98d567e2f | |||
392f110c8d | |||
6ced3c4896 | |||
4b8bf41a71 | |||
c59a244e9d | |||
4b7f2d268b | |||
45af8396b8 | |||
eeebbc77c3 | |||
72155c3b73 | |||
9aaf2d264a | |||
0951934f77 | |||
68b6ab3224 | |||
b0570ee486 | |||
72105b2aed | |||
9ab8a42340 | |||
7b606c0477 | |||
7d7dbb1bf9 | |||
34a3717fb9 | |||
2698022aaf | |||
90bf15186e | |||
5c3bc03bd2 | |||
0e50fbf706 | |||
f2231770f1 | |||
f5937ec019 | |||
52e3f2ce4a | |||
f0dffaff13 | |||
f31d8983bb | |||
a6395ebaf2 | |||
3b7536c0df | |||
34ade161de | |||
48d10ea17c | |||
fdc3b96beb | |||
95dbeb25a0 | |||
9d91e907e5 | |||
3f81dc0173 | |||
8ff3488926 | |||
611699252e | |||
9ad31fddff | |||
e6d96d999a | |||
eaca9c85f1 | |||
bc2ca89088 | |||
a5ed8f0ef0 | |||
3b3267db9a | |||
d53c16cf3e | |||
66f397239b | |||
1fe2e36d5d | |||
ab44e32afb | |||
41f627091f | |||
1e01c9059c | |||
5f62cd8526 | |||
a6abc74500 | |||
5c674715ee | |||
484fcca557 | |||
802f9983c2 | |||
d26abc438d | |||
2162bf4272 | |||
c454f9612e | |||
71cd59f775 | |||
719ef61461 | |||
152b34b5f4 | |||
61b6c3ce83 | |||
0a78a786c1 | |||
e6e68786f7 | |||
58e7066b9f | |||
ed662077b3 | |||
9f9c20aa4a | |||
73ba9fd4c3 | |||
771be89103 | |||
f740557d96 | |||
aff74c72dd | |||
5d967f5b7b | |||
9b6de9c411 | |||
27e246e709 | |||
ff64d1b2e9 | |||
235a81a534 | |||
4759e1cf54 | |||
f4a123cd17 |
518
LICENSE
Normal file
518
LICENSE
Normal file
@ -0,0 +1,518 @@
|
||||
|
||||
CeCILL FREE SOFTWARE LICENSE AGREEMENT
|
||||
|
||||
Version 2.1 dated 2013-06-21
|
||||
|
||||
|
||||
Notice
|
||||
|
||||
This Agreement is a Free Software license agreement that is the result
|
||||
of discussions between its authors in order to ensure compliance with
|
||||
the two main principles guiding its drafting:
|
||||
|
||||
* firstly, compliance with the principles governing the distribution
|
||||
of Free Software: access to source code, broad rights granted to users,
|
||||
* secondly, the election of a governing law, French law, with which it
|
||||
is conformant, both as regards the law of torts and intellectual
|
||||
property law, and the protection that it offers to both authors and
|
||||
holders of the economic rights over software.
|
||||
|
||||
The authors of the CeCILL (for Ce[a] C[nrs] I[nria] L[ogiciel] L[ibre])
|
||||
license are:
|
||||
|
||||
Commissariat à l'énergie atomique et aux énergies alternatives - CEA, a
|
||||
public scientific, technical and industrial research establishment,
|
||||
having its principal place of business at 25 rue Leblanc, immeuble Le
|
||||
Ponant D, 75015 Paris, France.
|
||||
|
||||
Centre National de la Recherche Scientifique - CNRS, a public scientific
|
||||
and technological establishment, having its principal place of business
|
||||
at 3 rue Michel-Ange, 75794 Paris cedex 16, France.
|
||||
|
||||
Institut National de Recherche en Informatique et en Automatique -
|
||||
Inria, a public scientific and technological establishment, having its
|
||||
principal place of business at Domaine de Voluceau, Rocquencourt, BP
|
||||
105, 78153 Le Chesnay cedex, France.
|
||||
|
||||
|
||||
Preamble
|
||||
|
||||
The purpose of this Free Software license agreement is to grant users
|
||||
the right to modify and redistribute the software governed by this
|
||||
license within the framework of an open source distribution model.
|
||||
|
||||
The exercising of this right is conditional upon certain obligations for
|
||||
users so as to preserve this status for all subsequent redistributions.
|
||||
|
||||
In consideration of access to the source code and the rights to copy,
|
||||
modify and redistribute granted by the license, users are provided only
|
||||
with a limited warranty and the software's author, the holder of the
|
||||
economic rights, and the successive licensors only have limited liability.
|
||||
|
||||
In this respect, the risks associated with loading, using, modifying
|
||||
and/or developing or reproducing the software by the user are brought to
|
||||
the user's attention, given its Free Software status, which may make it
|
||||
complicated to use, with the result that its use is reserved for
|
||||
developers and experienced professionals having in-depth computer
|
||||
knowledge. Users are therefore encouraged to load and test the
|
||||
suitability of the software as regards their requirements in conditions
|
||||
enabling the security of their systems and/or data to be ensured and,
|
||||
more generally, to use and operate it in the same conditions of
|
||||
security. This Agreement may be freely reproduced and published,
|
||||
provided it is not altered, and that no provisions are either added or
|
||||
removed herefrom.
|
||||
|
||||
This Agreement may apply to any or all software for which the holder of
|
||||
the economic rights decides to submit the use thereof to its provisions.
|
||||
|
||||
Frequently asked questions can be found on the official website of the
|
||||
CeCILL licenses family (http://www.cecill.info/index.en.html) for any
|
||||
necessary clarification.
|
||||
|
||||
|
||||
Article 1 - DEFINITIONS
|
||||
|
||||
For the purpose of this Agreement, when the following expressions
|
||||
commence with a capital letter, they shall have the following meaning:
|
||||
|
||||
Agreement: means this license agreement, and its possible subsequent
|
||||
versions and annexes.
|
||||
|
||||
Software: means the software in its Object Code and/or Source Code form
|
||||
and, where applicable, its documentation, "as is" when the Licensee
|
||||
accepts the Agreement.
|
||||
|
||||
Initial Software: means the Software in its Source Code and possibly its
|
||||
Object Code form and, where applicable, its documentation, "as is" when
|
||||
it is first distributed under the terms and conditions of the Agreement.
|
||||
|
||||
Modified Software: means the Software modified by at least one
|
||||
Contribution.
|
||||
|
||||
Source Code: means all the Software's instructions and program lines to
|
||||
which access is required so as to modify the Software.
|
||||
|
||||
Object Code: means the binary files originating from the compilation of
|
||||
the Source Code.
|
||||
|
||||
Holder: means the holder(s) of the economic rights over the Initial
|
||||
Software.
|
||||
|
||||
Licensee: means the Software user(s) having accepted the Agreement.
|
||||
|
||||
Contributor: means a Licensee having made at least one Contribution.
|
||||
|
||||
Licensor: means the Holder, or any other individual or legal entity, who
|
||||
distributes the Software under the Agreement.
|
||||
|
||||
Contribution: means any or all modifications, corrections, translations,
|
||||
adaptations and/or new functions integrated into the Software by any or
|
||||
all Contributors, as well as any or all Internal Modules.
|
||||
|
||||
Module: means a set of sources files including their documentation that
|
||||
enables supplementary functions or services in addition to those offered
|
||||
by the Software.
|
||||
|
||||
External Module: means any or all Modules, not derived from the
|
||||
Software, so that this Module and the Software run in separate address
|
||||
spaces, with one calling the other when they are run.
|
||||
|
||||
Internal Module: means any or all Module, connected to the Software so
|
||||
that they both execute in the same address space.
|
||||
|
||||
GNU GPL: means the GNU General Public License version 2 or any
|
||||
subsequent version, as published by the Free Software Foundation Inc.
|
||||
|
||||
GNU Affero GPL: means the GNU Affero General Public License version 3 or
|
||||
any subsequent version, as published by the Free Software Foundation Inc.
|
||||
|
||||
EUPL: means the European Union Public License version 1.1 or any
|
||||
subsequent version, as published by the European Commission.
|
||||
|
||||
Parties: mean both the Licensee and the Licensor.
|
||||
|
||||
These expressions may be used both in singular and plural form.
|
||||
|
||||
|
||||
Article 2 - PURPOSE
|
||||
|
||||
The purpose of the Agreement is the grant by the Licensor to the
|
||||
Licensee of a non-exclusive, transferable and worldwide license for the
|
||||
Software as set forth in Article 5 <#scope> hereinafter for the whole
|
||||
term of the protection granted by the rights over said Software.
|
||||
|
||||
|
||||
Article 3 - ACCEPTANCE
|
||||
|
||||
3.1 The Licensee shall be deemed as having accepted the terms and
|
||||
conditions of this Agreement upon the occurrence of the first of the
|
||||
following events:
|
||||
|
||||
* (i) loading the Software by any or all means, notably, by
|
||||
downloading from a remote server, or by loading from a physical medium;
|
||||
* (ii) the first time the Licensee exercises any of the rights granted
|
||||
hereunder.
|
||||
|
||||
3.2 One copy of the Agreement, containing a notice relating to the
|
||||
characteristics of the Software, to the limited warranty, and to the
|
||||
fact that its use is restricted to experienced users has been provided
|
||||
to the Licensee prior to its acceptance as set forth in Article 3.1
|
||||
<#accepting> hereinabove, and the Licensee hereby acknowledges that it
|
||||
has read and understood it.
|
||||
|
||||
|
||||
Article 4 - EFFECTIVE DATE AND TERM
|
||||
|
||||
|
||||
4.1 EFFECTIVE DATE
|
||||
|
||||
The Agreement shall become effective on the date when it is accepted by
|
||||
the Licensee as set forth in Article 3.1 <#accepting>.
|
||||
|
||||
|
||||
4.2 TERM
|
||||
|
||||
The Agreement shall remain in force for the entire legal term of
|
||||
protection of the economic rights over the Software.
|
||||
|
||||
|
||||
Article 5 - SCOPE OF RIGHTS GRANTED
|
||||
|
||||
The Licensor hereby grants to the Licensee, who accepts, the following
|
||||
rights over the Software for any or all use, and for the term of the
|
||||
Agreement, on the basis of the terms and conditions set forth hereinafter.
|
||||
|
||||
Besides, if the Licensor owns or comes to own one or more patents
|
||||
protecting all or part of the functions of the Software or of its
|
||||
components, the Licensor undertakes not to enforce the rights granted by
|
||||
these patents against successive Licensees using, exploiting or
|
||||
modifying the Software. If these patents are transferred, the Licensor
|
||||
undertakes to have the transferees subscribe to the obligations set
|
||||
forth in this paragraph.
|
||||
|
||||
|
||||
5.1 RIGHT OF USE
|
||||
|
||||
The Licensee is authorized to use the Software, without any limitation
|
||||
as to its fields of application, with it being hereinafter specified
|
||||
that this comprises:
|
||||
|
||||
1. permanent or temporary reproduction of all or part of the Software
|
||||
by any or all means and in any or all form.
|
||||
|
||||
2. loading, displaying, running, or storing the Software on any or all
|
||||
medium.
|
||||
|
||||
3. entitlement to observe, study or test its operation so as to
|
||||
determine the ideas and principles behind any or all constituent
|
||||
elements of said Software. This shall apply when the Licensee
|
||||
carries out any or all loading, displaying, running, transmission or
|
||||
storage operation as regards the Software, that it is entitled to
|
||||
carry out hereunder.
|
||||
|
||||
|
||||
5.2 ENTITLEMENT TO MAKE CONTRIBUTIONS
|
||||
|
||||
The right to make Contributions includes the right to translate, adapt,
|
||||
arrange, or make any or all modifications to the Software, and the right
|
||||
to reproduce the resulting software.
|
||||
|
||||
The Licensee is authorized to make any or all Contributions to the
|
||||
Software provided that it includes an explicit notice that it is the
|
||||
author of said Contribution and indicates the date of the creation thereof.
|
||||
|
||||
|
||||
5.3 RIGHT OF DISTRIBUTION
|
||||
|
||||
In particular, the right of distribution includes the right to publish,
|
||||
transmit and communicate the Software to the general public on any or
|
||||
all medium, and by any or all means, and the right to market, either in
|
||||
consideration of a fee, or free of charge, one or more copies of the
|
||||
Software by any means.
|
||||
|
||||
The Licensee is further authorized to distribute copies of the modified
|
||||
or unmodified Software to third parties according to the terms and
|
||||
conditions set forth hereinafter.
|
||||
|
||||
|
||||
5.3.1 DISTRIBUTION OF SOFTWARE WITHOUT MODIFICATION
|
||||
|
||||
The Licensee is authorized to distribute true copies of the Software in
|
||||
Source Code or Object Code form, provided that said distribution
|
||||
complies with all the provisions of the Agreement and is accompanied by:
|
||||
|
||||
1. a copy of the Agreement,
|
||||
|
||||
2. a notice relating to the limitation of both the Licensor's warranty
|
||||
and liability as set forth in Articles 8 and 9,
|
||||
|
||||
and that, in the event that only the Object Code of the Software is
|
||||
redistributed, the Licensee allows effective access to the full Source
|
||||
Code of the Software for a period of at least three years from the
|
||||
distribution of the Software, it being understood that the additional
|
||||
acquisition cost of the Source Code shall not exceed the cost of the
|
||||
data transfer.
|
||||
|
||||
|
||||
5.3.2 DISTRIBUTION OF MODIFIED SOFTWARE
|
||||
|
||||
When the Licensee makes a Contribution to the Software, the terms and
|
||||
conditions for the distribution of the resulting Modified Software
|
||||
become subject to all the provisions of this Agreement.
|
||||
|
||||
The Licensee is authorized to distribute the Modified Software, in
|
||||
source code or object code form, provided that said distribution
|
||||
complies with all the provisions of the Agreement and is accompanied by:
|
||||
|
||||
1. a copy of the Agreement,
|
||||
|
||||
2. a notice relating to the limitation of both the Licensor's warranty
|
||||
and liability as set forth in Articles 8 and 9,
|
||||
|
||||
and, in the event that only the object code of the Modified Software is
|
||||
redistributed,
|
||||
|
||||
3. a note stating the conditions of effective access to the full source
|
||||
code of the Modified Software for a period of at least three years
|
||||
from the distribution of the Modified Software, it being understood
|
||||
that the additional acquisition cost of the source code shall not
|
||||
exceed the cost of the data transfer.
|
||||
|
||||
|
||||
5.3.3 DISTRIBUTION OF EXTERNAL MODULES
|
||||
|
||||
When the Licensee has developed an External Module, the terms and
|
||||
conditions of this Agreement do not apply to said External Module, that
|
||||
may be distributed under a separate license agreement.
|
||||
|
||||
|
||||
5.3.4 COMPATIBILITY WITH OTHER LICENSES
|
||||
|
||||
The Licensee can include a code that is subject to the provisions of one
|
||||
of the versions of the GNU GPL, GNU Affero GPL and/or EUPL in the
|
||||
Modified or unmodified Software, and distribute that entire code under
|
||||
the terms of the same version of the GNU GPL, GNU Affero GPL and/or EUPL.
|
||||
|
||||
The Licensee can include the Modified or unmodified Software in a code
|
||||
that is subject to the provisions of one of the versions of the GNU GPL,
|
||||
GNU Affero GPL and/or EUPL and distribute that entire code under the
|
||||
terms of the same version of the GNU GPL, GNU Affero GPL and/or EUPL.
|
||||
|
||||
|
||||
Article 6 - INTELLECTUAL PROPERTY
|
||||
|
||||
|
||||
6.1 OVER THE INITIAL SOFTWARE
|
||||
|
||||
The Holder owns the economic rights over the Initial Software. Any or
|
||||
all use of the Initial Software is subject to compliance with the terms
|
||||
and conditions under which the Holder has elected to distribute its work
|
||||
and no one shall be entitled to modify the terms and conditions for the
|
||||
distribution of said Initial Software.
|
||||
|
||||
The Holder undertakes that the Initial Software will remain ruled at
|
||||
least by this Agreement, for the duration set forth in Article 4.2 <#term>.
|
||||
|
||||
|
||||
6.2 OVER THE CONTRIBUTIONS
|
||||
|
||||
The Licensee who develops a Contribution is the owner of the
|
||||
intellectual property rights over this Contribution as defined by
|
||||
applicable law.
|
||||
|
||||
|
||||
6.3 OVER THE EXTERNAL MODULES
|
||||
|
||||
The Licensee who develops an External Module is the owner of the
|
||||
intellectual property rights over this External Module as defined by
|
||||
applicable law and is free to choose the type of agreement that shall
|
||||
govern its distribution.
|
||||
|
||||
|
||||
6.4 JOINT PROVISIONS
|
||||
|
||||
The Licensee expressly undertakes:
|
||||
|
||||
1. not to remove, or modify, in any manner, the intellectual property
|
||||
notices attached to the Software;
|
||||
|
||||
2. to reproduce said notices, in an identical manner, in the copies of
|
||||
the Software modified or not.
|
||||
|
||||
The Licensee undertakes not to directly or indirectly infringe the
|
||||
intellectual property rights on the Software of the Holder and/or
|
||||
Contributors, and to take, where applicable, vis-à-vis its staff, any
|
||||
and all measures required to ensure respect of said intellectual
|
||||
property rights of the Holder and/or Contributors.
|
||||
|
||||
|
||||
Article 7 - RELATED SERVICES
|
||||
|
||||
7.1 Under no circumstances shall the Agreement oblige the Licensor to
|
||||
provide technical assistance or maintenance services for the Software.
|
||||
|
||||
However, the Licensor is entitled to offer this type of services. The
|
||||
terms and conditions of such technical assistance, and/or such
|
||||
maintenance, shall be set forth in a separate instrument. Only the
|
||||
Licensor offering said maintenance and/or technical assistance services
|
||||
shall incur liability therefor.
|
||||
|
||||
7.2 Similarly, any Licensor is entitled to offer to its licensees, under
|
||||
its sole responsibility, a warranty, that shall only be binding upon
|
||||
itself, for the redistribution of the Software and/or the Modified
|
||||
Software, under terms and conditions that it is free to decide. Said
|
||||
warranty, and the financial terms and conditions of its application,
|
||||
shall be subject of a separate instrument executed between the Licensor
|
||||
and the Licensee.
|
||||
|
||||
|
||||
Article 8 - LIABILITY
|
||||
|
||||
8.1 Subject to the provisions of Article 8.2, the Licensee shall be
|
||||
entitled to claim compensation for any direct loss it may have suffered
|
||||
from the Software as a result of a fault on the part of the relevant
|
||||
Licensor, subject to providing evidence thereof.
|
||||
|
||||
8.2 The Licensor's liability is limited to the commitments made under
|
||||
this Agreement and shall not be incurred as a result of in particular:
|
||||
(i) loss due the Licensee's total or partial failure to fulfill its
|
||||
obligations, (ii) direct or consequential loss that is suffered by the
|
||||
Licensee due to the use or performance of the Software, and (iii) more
|
||||
generally, any consequential loss. In particular the Parties expressly
|
||||
agree that any or all pecuniary or business loss (i.e. loss of data,
|
||||
loss of profits, operating loss, loss of customers or orders,
|
||||
opportunity cost, any disturbance to business activities) or any or all
|
||||
legal proceedings instituted against the Licensee by a third party,
|
||||
shall constitute consequential loss and shall not provide entitlement to
|
||||
any or all compensation from the Licensor.
|
||||
|
||||
|
||||
Article 9 - WARRANTY
|
||||
|
||||
9.1 The Licensee acknowledges that the scientific and technical
|
||||
state-of-the-art when the Software was distributed did not enable all
|
||||
possible uses to be tested and verified, nor for the presence of
|
||||
possible defects to be detected. In this respect, the Licensee's
|
||||
attention has been drawn to the risks associated with loading, using,
|
||||
modifying and/or developing and reproducing the Software which are
|
||||
reserved for experienced users.
|
||||
|
||||
The Licensee shall be responsible for verifying, by any or all means,
|
||||
the suitability of the product for its requirements, its good working
|
||||
order, and for ensuring that it shall not cause damage to either persons
|
||||
or properties.
|
||||
|
||||
9.2 The Licensor hereby represents, in good faith, that it is entitled
|
||||
to grant all the rights over the Software (including in particular the
|
||||
rights set forth in Article 5 <#scope>).
|
||||
|
||||
9.3 The Licensee acknowledges that the Software is supplied "as is" by
|
||||
the Licensor without any other express or tacit warranty, other than
|
||||
that provided for in Article 9.2 <#good-faith> and, in particular,
|
||||
without any warranty as to its commercial value, its secured, safe,
|
||||
innovative or relevant nature.
|
||||
|
||||
Specifically, the Licensor does not warrant that the Software is free
|
||||
from any error, that it will operate without interruption, that it will
|
||||
be compatible with the Licensee's own equipment and software
|
||||
configuration, nor that it will meet the Licensee's requirements.
|
||||
|
||||
9.4 The Licensor does not either expressly or tacitly warrant that the
|
||||
Software does not infringe any third party intellectual property right
|
||||
relating to a patent, software or any other property right. Therefore,
|
||||
the Licensor disclaims any and all liability towards the Licensee
|
||||
arising out of any or all proceedings for infringement that may be
|
||||
instituted in respect of the use, modification and redistribution of the
|
||||
Software. Nevertheless, should such proceedings be instituted against
|
||||
the Licensee, the Licensor shall provide it with technical and legal
|
||||
expertise for its defense. Such technical and legal expertise shall be
|
||||
decided on a case-by-case basis between the relevant Licensor and the
|
||||
Licensee pursuant to a memorandum of understanding. The Licensor
|
||||
disclaims any and all liability as regards the Licensee's use of the
|
||||
name of the Software. No warranty is given as regards the existence of
|
||||
prior rights over the name of the Software or as regards the existence
|
||||
of a trademark.
|
||||
|
||||
|
||||
Article 10 - TERMINATION
|
||||
|
||||
10.1 In the event of a breach by the Licensee of its obligations
|
||||
hereunder, the Licensor may automatically terminate this Agreement
|
||||
thirty (30) days after notice has been sent to the Licensee and has
|
||||
remained ineffective.
|
||||
|
||||
10.2 A Licensee whose Agreement is terminated shall no longer be
|
||||
authorized to use, modify or distribute the Software. However, any
|
||||
licenses that it may have granted prior to termination of the Agreement
|
||||
shall remain valid subject to their having been granted in compliance
|
||||
with the terms and conditions hereof.
|
||||
|
||||
|
||||
Article 11 - MISCELLANEOUS
|
||||
|
||||
|
||||
11.1 EXCUSABLE EVENTS
|
||||
|
||||
Neither Party shall be liable for any or all delay, or failure to
|
||||
perform the Agreement, that may be attributable to an event of force
|
||||
majeure, an act of God or an outside cause, such as defective
|
||||
functioning or interruptions of the electricity or telecommunications
|
||||
networks, network paralysis following a virus attack, intervention by
|
||||
government authorities, natural disasters, water damage, earthquakes,
|
||||
fire, explosions, strikes and labor unrest, war, etc.
|
||||
|
||||
11.2 Any failure by either Party, on one or more occasions, to invoke
|
||||
one or more of the provisions hereof, shall under no circumstances be
|
||||
interpreted as being a waiver by the interested Party of its right to
|
||||
invoke said provision(s) subsequently.
|
||||
|
||||
11.3 The Agreement cancels and replaces any or all previous agreements,
|
||||
whether written or oral, between the Parties and having the same
|
||||
purpose, and constitutes the entirety of the agreement between said
|
||||
Parties concerning said purpose. No supplement or modification to the
|
||||
terms and conditions hereof shall be effective as between the Parties
|
||||
unless it is made in writing and signed by their duly authorized
|
||||
representatives.
|
||||
|
||||
11.4 In the event that one or more of the provisions hereof were to
|
||||
conflict with a current or future applicable act or legislative text,
|
||||
said act or legislative text shall prevail, and the Parties shall make
|
||||
the necessary amendments so as to comply with said act or legislative
|
||||
text. All other provisions shall remain effective. Similarly, invalidity
|
||||
of a provision of the Agreement, for any reason whatsoever, shall not
|
||||
cause the Agreement as a whole to be invalid.
|
||||
|
||||
|
||||
11.5 LANGUAGE
|
||||
|
||||
The Agreement is drafted in both French and English and both versions
|
||||
are deemed authentic.
|
||||
|
||||
|
||||
Article 12 - NEW VERSIONS OF THE AGREEMENT
|
||||
|
||||
12.1 Any person is authorized to duplicate and distribute copies of this
|
||||
Agreement.
|
||||
|
||||
12.2 So as to ensure coherence, the wording of this Agreement is
|
||||
protected and may only be modified by the authors of the License, who
|
||||
reserve the right to periodically publish updates or new versions of the
|
||||
Agreement, each with a separate number. These subsequent versions may
|
||||
address new issues encountered by Free Software.
|
||||
|
||||
12.3 Any Software distributed under a given version of the Agreement may
|
||||
only be subsequently distributed under the same version of the Agreement
|
||||
or a subsequent version, subject to the provisions of Article 5.3.4
|
||||
<#compatibility>.
|
||||
|
||||
|
||||
Article 13 - GOVERNING LAW AND JURISDICTION
|
||||
|
||||
13.1 The Agreement is governed by French law. The Parties agree to
|
||||
endeavor to seek an amicable solution to any disagreements or disputes
|
||||
that may arise during the performance of the Agreement.
|
||||
|
||||
13.2 Failing an amicable solution within two (2) months as from their
|
||||
occurrence, and unless emergency proceedings are necessary, the
|
||||
disagreements or disputes shall be referred to the Paris Courts having
|
||||
jurisdiction, by the more diligent Party.
|
@ -10,33 +10,29 @@ from obidistutils.serenity.checksystem import is_mac_system
|
||||
|
||||
class build(ori_build):
|
||||
|
||||
def has_ctools(self):
|
||||
return self.distribution.has_ctools()
|
||||
|
||||
def has_files(self):
|
||||
return self.distribution.has_files()
|
||||
|
||||
def has_executables(self):
|
||||
return self.distribution.has_executables()
|
||||
|
||||
def has_ext_modules(self):
|
||||
return self.distribution.has_ext_modules()
|
||||
|
||||
def has_littlebigman(self):
|
||||
return True
|
||||
|
||||
def has_pidname(self):
|
||||
return is_mac_system()
|
||||
|
||||
def has_doc(self):
|
||||
return True
|
||||
|
||||
def has_littlebigman(self):
|
||||
return True
|
||||
|
||||
sub_commands = [('littlebigman', has_littlebigman),
|
||||
('pidname',has_pidname),
|
||||
('build_ctools', has_ctools),
|
||||
('build_files', has_files),
|
||||
('build_cexe', has_executables)] \
|
||||
+ ori_build.sub_commands + \
|
||||
[('build_sphinx',has_doc)]
|
||||
|
||||
try:
|
||||
from obidistutils.command.build_sphinx import build_sphinx # @UnusedImport
|
||||
|
||||
sub_commands = [("littlebigman",has_littlebigman),
|
||||
('pidname',has_pidname)
|
||||
] \
|
||||
+ ori_build.sub_commands + \
|
||||
[('build_sphinx',has_doc)]
|
||||
except ImportError:
|
||||
sub_commands = [("littlebigman",has_littlebigman),
|
||||
('pidname',has_pidname)
|
||||
] \
|
||||
+ ori_build.sub_commands
|
||||
|
||||
|
@ -4,10 +4,11 @@ Created on 20 oct. 2012
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
from obidistutils.command.build_ctools import build_ctools
|
||||
from .build_ctools import build_ctools
|
||||
from .build_exe import build_exe
|
||||
from distutils.errors import DistutilsSetupError
|
||||
from distutils import log
|
||||
|
||||
import os
|
||||
|
||||
class build_cexe(build_ctools):
|
||||
|
||||
@ -38,7 +39,9 @@ class build_cexe(build_ctools):
|
||||
self.set_undefined_options('build_files',
|
||||
('files', 'built_files'))
|
||||
|
||||
self.executables = self.distribution.executables
|
||||
self.executables = self.distribution.executables
|
||||
# self.build_cexe = os.path.join(os.path.dirname(self.build_cexe),'cbinaries')
|
||||
# self.mkpath(self.build_cexe)
|
||||
|
||||
if self.executables:
|
||||
self.check_executable_list(self.executables)
|
||||
@ -70,4 +73,13 @@ class build_cexe(build_ctools):
|
||||
log.info("%s ok",message)
|
||||
|
||||
return sources
|
||||
|
||||
|
||||
def run(self):
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
build_exe.run(self)
|
||||
|
||||
|
||||
|
||||
|
@ -5,7 +5,8 @@ Created on 20 oct. 2012
|
||||
'''
|
||||
|
||||
|
||||
from obidistutils.command.build_exe import build_exe
|
||||
from .build_exe import build_exe
|
||||
from distutils import log
|
||||
|
||||
class build_ctools(build_exe):
|
||||
description = "build C/C++ executable not distributed with Python extensions"
|
||||
@ -37,19 +38,26 @@ class build_ctools(build_exe):
|
||||
self.executables = self.distribution.ctools
|
||||
self.check_executable_list(self.executables)
|
||||
|
||||
|
||||
if self.littlebigman =='-DLITTLE_END':
|
||||
if self.define is None:
|
||||
self.define=[('LITTLE_END',None)]
|
||||
else:
|
||||
self.define.append('LITTLE_END',None)
|
||||
|
||||
log.info('Look for CPU architecture... %s',self.define)
|
||||
|
||||
self.ctools = set()
|
||||
|
||||
def run(self):
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
|
||||
build_exe.run(self)
|
||||
|
||||
for e,p in self.executables: # @UnusedVariable
|
||||
self.ctools.add(e)
|
||||
|
||||
|
||||
|
||||
|
@ -208,3 +208,4 @@ class build_exe(Command):
|
||||
output_dir=self.build_cexe,
|
||||
debug=self.debug)
|
||||
|
||||
|
||||
|
@ -7,87 +7,109 @@ Created on 13 fevr. 2014
|
||||
from distutils import log
|
||||
import os
|
||||
|
||||
from Cython.Distutils import build_ext as ori_build_ext # @UnresolvedImport
|
||||
|
||||
|
||||
from distutils.errors import DistutilsSetupError
|
||||
|
||||
class build_ext(ori_build_ext):
|
||||
def modifyDocScripts(self):
|
||||
build_dir_file=open("doc/build_dir.txt","w")
|
||||
print(self.build_lib,file=build_dir_file)
|
||||
build_dir_file.close()
|
||||
|
||||
def initialize_options(self):
|
||||
ori_build_ext.initialize_options(self) # @UndefinedVariable
|
||||
self.littlebigman = None
|
||||
self.built_files = None
|
||||
|
||||
try:
|
||||
from Cython.Distutils import build_ext as ori_build_ext # @UnresolvedImport
|
||||
from Cython.Compiler import Options as cython_options # @UnresolvedImport
|
||||
class build_ext(ori_build_ext):
|
||||
|
||||
def finalize_options(self):
|
||||
ori_build_ext.finalize_options(self) # @UndefinedVariable
|
||||
|
||||
self.set_undefined_options('littlebigman',
|
||||
('littlebigman', 'littlebigman'))
|
||||
|
||||
self.set_undefined_options('build_files',
|
||||
('files', 'built_files'))
|
||||
|
||||
self.cython_c_in_temp = 1
|
||||
|
||||
if self.littlebigman =='-DLITTLE_END':
|
||||
if self.define is None:
|
||||
self.define=[('LITTLE_END',None)]
|
||||
else:
|
||||
self.define.append('LITTLE_END',None)
|
||||
|
||||
def substitute_sources(self,exe_name,sources):
|
||||
"""
|
||||
Substitutes source file name starting by an @ by the actual
|
||||
name of the built file (see --> build_files)
|
||||
"""
|
||||
sources = list(sources)
|
||||
for i in range(len(sources)):
|
||||
message = "%s :-> %s" % (exe_name,sources[i])
|
||||
if sources[i][0]=='@':
|
||||
try:
|
||||
filename = self.built_files[sources[i][1:]]
|
||||
except KeyError:
|
||||
tmpfilename = os.path.join(self.build_temp,sources[i][1:])
|
||||
if os.path.isfile (tmpfilename):
|
||||
filename = tmpfilename
|
||||
else:
|
||||
raise DistutilsSetupError(
|
||||
'The %s filename declared in the source '
|
||||
'files of the program %s have not been '
|
||||
'built by the installation process' % (sources[i],
|
||||
exe_name))
|
||||
sources[i]=filename
|
||||
log.info("%s changed to %s",message,filename)
|
||||
else:
|
||||
log.info("%s ok",message)
|
||||
|
||||
return sources
|
||||
|
||||
def build_extensions(self):
|
||||
# First, sanity-check the 'extensions' list
|
||||
|
||||
for ext in self.extensions:
|
||||
ext.sources = self.substitute_sources(ext.name,ext.sources)
|
||||
def modifyDocScripts(self):
|
||||
build_dir_file=open("doc/sphinx/build_dir.txt","w")
|
||||
print(self.build_lib,file=build_dir_file)
|
||||
build_dir_file.close()
|
||||
|
||||
self.check_extensions_list(self.extensions)
|
||||
|
||||
for ext in self.extensions:
|
||||
log.info("%s :-> %s",ext.name,ext.sources)
|
||||
ext.sources = self.cython_sources(ext.sources, ext)
|
||||
self.build_extension(ext)
|
||||
|
||||
def initialize_options(self):
|
||||
ori_build_ext.initialize_options(self) # @UndefinedVariable
|
||||
self.littlebigman = None
|
||||
self.built_files = None
|
||||
|
||||
|
||||
def run(self):
|
||||
self.modifyDocScripts()
|
||||
ori_build_ext.run(self) # @UndefinedVariable
|
||||
def finalize_options(self):
|
||||
ori_build_ext.finalize_options(self) # @UndefinedVariable
|
||||
|
||||
self.set_undefined_options('littlebigman',
|
||||
('littlebigman', 'littlebigman'))
|
||||
|
||||
self.set_undefined_options('build_files',
|
||||
('files', 'built_files'))
|
||||
|
||||
self.cython_c_in_temp = 1
|
||||
|
||||
if self.littlebigman =='-DLITTLE_END':
|
||||
if self.define is None:
|
||||
self.define=[('LITTLE_END',None)]
|
||||
else:
|
||||
self.define.append('LITTLE_END',None)
|
||||
|
||||
def substitute_sources(self,exe_name,sources):
|
||||
"""
|
||||
Substitutes source file name starting by an @ by the actual
|
||||
name of the built file (see --> build_files)
|
||||
"""
|
||||
sources = list(sources)
|
||||
for i in range(len(sources)):
|
||||
message = "%s :-> %s" % (exe_name,sources[i])
|
||||
if sources[i][0]=='@':
|
||||
try:
|
||||
filename = self.built_files[sources[i][1:]]
|
||||
except KeyError:
|
||||
tmpfilename = os.path.join(self.build_temp,sources[i][1:])
|
||||
if os.path.isfile (tmpfilename):
|
||||
filename = tmpfilename
|
||||
else:
|
||||
raise DistutilsSetupError(
|
||||
'The %s filename declared in the source '
|
||||
'files of the program %s have not been '
|
||||
'built by the installation process' % (sources[i],
|
||||
exe_name))
|
||||
sources[i]=filename
|
||||
log.info("%s changed to %s",message,filename)
|
||||
else:
|
||||
log.info("%s ok",message)
|
||||
|
||||
return sources
|
||||
|
||||
def build_extensions(self):
|
||||
# First, sanity-check the 'extensions' list
|
||||
|
||||
for ext in self.extensions:
|
||||
ext.sources = self.substitute_sources(ext.name,ext.sources)
|
||||
|
||||
self.check_extensions_list(self.extensions)
|
||||
|
||||
for ext in self.extensions:
|
||||
log.info("%s :-> %s",ext.name,ext.sources)
|
||||
ext.sources = self.cython_sources(ext.sources, ext)
|
||||
self.build_extension(ext)
|
||||
|
||||
|
||||
def run(self):
|
||||
self.modifyDocScripts()
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
cython_options.annotate = True
|
||||
ori_build_ext.run(self) # @UndefinedVariable
|
||||
|
||||
|
||||
def has_files(self):
|
||||
return self.distribution.has_files()
|
||||
|
||||
def has_executables(self):
|
||||
return self.distribution.has_executables()
|
||||
|
||||
sub_commands = [('build_files',has_files),
|
||||
('build_cexe', has_executables)
|
||||
] + \
|
||||
ori_build_ext.sub_commands
|
||||
|
||||
|
||||
except ImportError:
|
||||
from distutils.command import build_ext # @UnusedImport
|
||||
|
||||
|
||||
|
||||
|
@ -28,6 +28,10 @@ class build_files(Command):
|
||||
self.files = {}
|
||||
|
||||
def run(self):
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
|
||||
for dest,prog,command in self.distribution.files:
|
||||
destfile = os.path.join(self.build_temp,dest)
|
||||
@ -48,6 +52,12 @@ class build_files(Command):
|
||||
|
||||
log.info("Done.\n")
|
||||
|
||||
def has_ctools(self):
|
||||
return self.distribution.has_ctools()
|
||||
|
||||
|
||||
sub_commands = [('build_ctools', has_ctools)] + \
|
||||
Command.sub_commands
|
||||
|
||||
|
||||
|
||||
|
@ -6,14 +6,14 @@ Created on 20 oct. 2012
|
||||
|
||||
import os.path
|
||||
|
||||
from distutils.command.build_scripts import build_scripts as ori_build_scripts,\
|
||||
first_line_re
|
||||
from distutils.command.build_scripts import build_scripts as ori_build_scripts
|
||||
from distutils.util import convert_path
|
||||
from distutils import log, sysconfig
|
||||
from distutils.dep_util import newer
|
||||
from stat import ST_MODE
|
||||
import re
|
||||
|
||||
|
||||
first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
|
||||
|
||||
class build_scripts(ori_build_scripts):
|
||||
|
||||
|
@ -4,23 +4,24 @@ Created on 10 mars 2015
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
from sphinx.setup_command import BuildDoc as ori_build_sphinx # @UnresolvedImport
|
||||
try:
|
||||
from sphinx.setup_command import BuildDoc as ori_build_sphinx # @UnresolvedImport
|
||||
|
||||
class build_sphinx(ori_build_sphinx):
|
||||
'''Build Sphinx documentation in html, epub and man formats
|
||||
'''
|
||||
|
||||
class build_sphinx(ori_build_sphinx):
|
||||
'''
|
||||
Build Sphinx documentation in html, epub and man formats
|
||||
'''
|
||||
|
||||
description = __doc__
|
||||
|
||||
def run(self):
|
||||
self.builder='html'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='epub'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='man'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
|
||||
description = __doc__
|
||||
|
||||
def run(self):
|
||||
self.builder='html'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='epub'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='man'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
except ImportError:
|
||||
pass
|
@ -4,11 +4,16 @@ Created on 6 oct. 2014
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
# try:
|
||||
# from setuptools.command.install import install as install_ori
|
||||
# except ImportError:
|
||||
# from distutils.command.install import install as install_ori
|
||||
|
||||
from distutils.command.install import install as install_ori
|
||||
|
||||
class install(install_ori):
|
||||
|
||||
def __init__(self,dist):
|
||||
install_ori.__init__(self, dist)
|
||||
self.sub_commands.insert(0, ('build',lambda self: True))
|
||||
# self.sub_commands.insert(0, ('build',lambda self: True))
|
||||
self.sub_commands.append(('install_sphinx',lambda self: self.distribution.serenity))
|
||||
|
@ -3,6 +3,12 @@ Created on 20 oct. 2012
|
||||
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
# try:
|
||||
# from setuptools.command.install_scripts import install_scripts as ori_install_scripts
|
||||
# except ImportError:
|
||||
# from distutils.command.install_scripts import install_scripts as ori_install_scripts
|
||||
|
||||
from distutils.command.install_scripts import install_scripts as ori_install_scripts
|
||||
|
||||
import os.path
|
||||
@ -18,12 +24,10 @@ class install_scripts(ori_install_scripts):
|
||||
def install_public_link(self):
|
||||
self.mkpath(self.public_dir)
|
||||
for file in self.get_outputs():
|
||||
if self.dry_run:
|
||||
log.info("changing mode of %s", file)
|
||||
else:
|
||||
log.info("exporting file %s -> %s", file,os.path.join(self.public_dir,
|
||||
os.path.split(file)[1]
|
||||
))
|
||||
log.info("exporting file %s -> %s", file,os.path.join(self.public_dir,
|
||||
os.path.split(file)[1]
|
||||
))
|
||||
if not self.dry_run:
|
||||
dest = os.path.join(self.public_dir,
|
||||
os.path.split(file)[1]
|
||||
)
|
||||
|
@ -43,4 +43,19 @@ class install_sphinx(Command):
|
||||
self.copy_file(os.path.join(epub),
|
||||
os.path.join(self.install_doc,os.path.split(epub)[1]))
|
||||
|
||||
def get_outputs(self):
|
||||
directory=os.path.join(self.install_doc,'html')
|
||||
files = [os.path.join(self.install_doc,'html', f)
|
||||
for dp, dn, filenames in os.walk(directory) for f in filenames] # @UnusedVariable
|
||||
|
||||
directory=os.path.join(self.build_dir,'man')
|
||||
files.append(os.path.join(self.install_doc,'man','man1', f)
|
||||
for dp, dn, filenames in os.walk(directory) for f in filenames) # @UnusedVariable
|
||||
|
||||
directory=os.path.join(self.build_dir,'epub')
|
||||
files.append(os.path.join(self.install_doc, f)
|
||||
for dp, dn, filenames in os.walk(directory) # @UnusedVariable
|
||||
for f in glob.glob(os.path.join(dp, '*.epub')) )
|
||||
|
||||
return files
|
||||
|
@ -49,7 +49,7 @@ class littlebigman(build_exe):
|
||||
shell=True,
|
||||
stdout=subprocess.PIPE)
|
||||
little = p.communicate()[0]
|
||||
return little
|
||||
return little.decode('latin1')
|
||||
|
||||
def run(self):
|
||||
build_exe.run(self)
|
||||
|
@ -41,6 +41,10 @@ class pidname(build_exe):
|
||||
else:
|
||||
self.executables = []
|
||||
|
||||
# self.build_cexe = os.path.join(os.path.dirname(self.build_cexe),'cbinaries')
|
||||
# self.mkpath(self.build_cexe)
|
||||
|
||||
|
||||
|
||||
def run(self):
|
||||
if is_mac_system():
|
||||
|
@ -9,8 +9,11 @@ import os.path
|
||||
import glob
|
||||
import sys
|
||||
|
||||
# try:
|
||||
# from setuptools.extension import Extension
|
||||
# except ImportError:
|
||||
# from distutils.extension import Extension
|
||||
|
||||
from distutils.core import setup as ori_setup
|
||||
from distutils.extension import Extension
|
||||
|
||||
from obidistutils.serenity.checkpackage import install_requirements,\
|
||||
@ -22,6 +25,7 @@ from obidistutils.serenity.rerun import rerun_with_anothe_python
|
||||
from distutils import log
|
||||
|
||||
from obidistutils.dist import Distribution
|
||||
from obidistutils.serenity import is_serenity
|
||||
|
||||
|
||||
def findPackage(root,base=None):
|
||||
@ -62,7 +66,10 @@ def findCython(root,base=None,pyrexs=None):
|
||||
cfiles = [x for x in cfiles if x[-2:]==".c"]
|
||||
pyrexs[-1].sources.extend(cfiles)
|
||||
pyrexs[-1].include_dirs.extend(incdir)
|
||||
pyrexs[-1].extra_compile_args.extend(['-msse2','-Wno-unused-function'])
|
||||
pyrexs[-1].extra_compile_args.extend(['-msse2',
|
||||
'-Wno-unused-function',
|
||||
'-Wmissing-braces',
|
||||
'-Wchar-subscripts'])
|
||||
|
||||
except IOError:
|
||||
pass
|
||||
@ -78,8 +85,8 @@ def rootname(x):
|
||||
def prepare_commands():
|
||||
from obidistutils.command.build import build
|
||||
from obidistutils.command.littlebigman import littlebigman
|
||||
# from obidistutils.command.serenity import serenity
|
||||
from obidistutils.command.build_cexe import build_cexe
|
||||
from obidistutils.command.build_sphinx import build_sphinx
|
||||
from obidistutils.command.build_ext import build_ext
|
||||
from obidistutils.command.build_ctools import build_ctools
|
||||
from obidistutils.command.build_files import build_files
|
||||
@ -89,8 +96,11 @@ def prepare_commands():
|
||||
from obidistutils.command.install import install
|
||||
from obidistutils.command.pidname import pidname
|
||||
from obidistutils.command.sdist import sdist
|
||||
|
||||
|
||||
|
||||
|
||||
COMMANDS = {'build':build,
|
||||
# 'serenity':serenity,
|
||||
'littlebigman':littlebigman,
|
||||
'pidname':pidname,
|
||||
'build_ctools':build_ctools,
|
||||
@ -98,12 +108,22 @@ def prepare_commands():
|
||||
'build_cexe':build_cexe,
|
||||
'build_ext': build_ext,
|
||||
'build_scripts':build_scripts,
|
||||
'build_sphinx':build_sphinx,
|
||||
'install_scripts':install_scripts,
|
||||
'install_sphinx':install_sphinx,
|
||||
'install':install,
|
||||
'sdist':sdist}
|
||||
|
||||
# try:
|
||||
# from setuptools.commands import egg_info
|
||||
# COMMANDS['egg_info']=egg_info
|
||||
# except ImportError:
|
||||
# pass
|
||||
try:
|
||||
from obidistutils.command.build_sphinx import build_sphinx
|
||||
COMMANDS['build_sphinx']=build_sphinx
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return COMMANDS
|
||||
|
||||
|
||||
@ -139,19 +159,21 @@ def setup(**attrs):
|
||||
del attrs['requirements']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
enforce_good_python(minversion, maxversion, fork)
|
||||
if is_serenity():
|
||||
|
||||
if (install_requirements(requirementfile)):
|
||||
rerun_with_anothe_python(sys.executable,minversion,maxversion,fork)
|
||||
|
||||
enforce_good_python(minversion, maxversion, fork)
|
||||
|
||||
if (install_requirements(requirementfile)):
|
||||
rerun_with_anothe_python(sys.executable,minversion,maxversion,fork)
|
||||
|
||||
|
||||
try:
|
||||
check_requirements(requirementfile)
|
||||
except RequirementError as e :
|
||||
log.error(e)
|
||||
sys.exit(1)
|
||||
try:
|
||||
check_requirements(requirementfile)
|
||||
except RequirementError as e :
|
||||
log.error(e)
|
||||
sys.exit(1)
|
||||
|
||||
if 'distclass' not in attrs:
|
||||
attrs['distclass']=Distribution
|
||||
@ -193,5 +215,12 @@ def setup(**attrs):
|
||||
|
||||
if 'ext_modules' not in attrs:
|
||||
attrs['ext_modules'] = EXTENTION
|
||||
|
||||
# try:
|
||||
# from setuptools.core import setup as ori_setup
|
||||
# except ImportError:
|
||||
# from distutils.core import setup as ori_setup
|
||||
|
||||
from distutils.core import setup as ori_setup
|
||||
|
||||
ori_setup(**attrs)
|
||||
|
@ -4,6 +4,11 @@ Created on 20 oct. 2012
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
# try:
|
||||
# from setuptools.dist import Distribution as ori_Distribution
|
||||
# except ImportError:
|
||||
# from distutils.dist import Distribution as ori_Distribution
|
||||
|
||||
from distutils.dist import Distribution as ori_Distribution
|
||||
|
||||
class Distribution(ori_Distribution):
|
||||
@ -29,6 +34,14 @@ class Distribution(ori_Distribution):
|
||||
"By default the name is PACKAGE-VERSION"
|
||||
))
|
||||
|
||||
def run_commands(self):
|
||||
"""Run each command that was seen on the setup script command line.
|
||||
Uses the list of commands found and cache of command objects
|
||||
created by 'get_command_obj()'.
|
||||
"""
|
||||
# self.run_command('littlebigman')
|
||||
ori_Distribution.run_commands(self)
|
||||
|
||||
|
||||
def has_executables(self):
|
||||
return self.executables is not None and self.executables
|
||||
|
@ -11,8 +11,7 @@ import re
|
||||
from distutils.errors import DistutilsError
|
||||
import tempfile
|
||||
|
||||
import importlib
|
||||
import imp
|
||||
from importlib.util import spec_from_file_location # @UnresolvedImport
|
||||
import zipimport
|
||||
|
||||
import argparse
|
||||
@ -106,3 +105,8 @@ def serenity_mode(package,version):
|
||||
return args.serenity
|
||||
|
||||
|
||||
def getVersion(source,main,version):
|
||||
path = os.path.join(source,main,'%s.py' % version)
|
||||
spec = spec_from_file_location('version',path)
|
||||
return spec.loader.load_module().version.strip()
|
||||
|
||||
|
36
distutils.ext/obidistutils/serenity/bootstrappip.py
Normal file
36
distutils.ext/obidistutils/serenity/bootstrappip.py
Normal file
@ -0,0 +1,36 @@
|
||||
'''
|
||||
Created on 22 janv. 2016
|
||||
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
import sys
|
||||
from urllib import request
|
||||
import os.path
|
||||
|
||||
from obidistutils.serenity.util import get_serenity_dir
|
||||
from obidistutils.serenity.rerun import rerun_with_anothe_python
|
||||
from obidistutils.serenity.checkpython import is_a_virtualenv_python
|
||||
|
||||
getpipurl="https://bootstrap.pypa.io/get-pip.py"
|
||||
|
||||
def bootstrap():
|
||||
|
||||
getpipfile=os.path.join(get_serenity_dir(),"get-pip.py")
|
||||
|
||||
with request.urlopen(getpipurl) as getpip:
|
||||
with open(getpipfile,"wb") as out:
|
||||
for l in getpip:
|
||||
out.write(l)
|
||||
|
||||
python = sys.executable
|
||||
|
||||
if is_a_virtualenv_python():
|
||||
command= "%s %s" % (python,getpipfile)
|
||||
else:
|
||||
command= "%s %s --user" % (python,getpipfile)
|
||||
|
||||
os.system(command)
|
||||
|
||||
rerun_with_anothe_python(python)
|
||||
|
@ -7,8 +7,13 @@ Created on 2 oct. 2014
|
||||
import re
|
||||
import os
|
||||
|
||||
import pip # @UnresolvedImport
|
||||
from pip.utils import get_installed_distributions # @UnresolvedImport
|
||||
try:
|
||||
import pip # @UnresolvedImport
|
||||
from pip.utils import get_installed_distributions # @UnresolvedImport
|
||||
except ImportError:
|
||||
from .bootstrappip import bootstrap
|
||||
bootstrap()
|
||||
|
||||
from distutils.version import StrictVersion # @UnusedImport
|
||||
from distutils.errors import DistutilsError
|
||||
from distutils import log
|
||||
@ -83,6 +88,8 @@ def install_requirements(requirementfile='requirements.txt'):
|
||||
log.info(" Installing requirement : %s" % x)
|
||||
pip_install_package(x)
|
||||
install_something=True
|
||||
if x[0:3]=='pip':
|
||||
return True
|
||||
|
||||
return install_something
|
||||
|
||||
@ -134,7 +141,9 @@ def get_package_requirement(package,requirementfile='requirements.txt'):
|
||||
|
||||
def pip_install_package(package,directory=None,upgrade=True):
|
||||
|
||||
log.info('installing %s in directory %s' % (package,str(directory)))
|
||||
if directory is not None:
|
||||
log.info(' installing %s in directory %s' % (package,str(directory)))
|
||||
|
||||
|
||||
if 'http_proxy' in os.environ and 'https_proxy' not in os.environ:
|
||||
os.environ['https_proxy']=os.environ['http_proxy']
|
||||
@ -144,8 +153,8 @@ def pip_install_package(package,directory=None,upgrade=True):
|
||||
if upgrade:
|
||||
args.append('--upgrade')
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
args.append('--proxy=%s' % os.environ['http_proxy'])
|
||||
if 'https_proxy' in os.environ:
|
||||
args.append('--proxy=%s' % os.environ['https_proxy'])
|
||||
|
||||
if directory is not None:
|
||||
args.append('--target=%s' % directory)
|
||||
|
@ -37,7 +37,7 @@ def is_python_version(path=None,minversion='3.4',maxversion=None):
|
||||
stdout=subprocess.PIPE)
|
||||
pythonversion=str(p.communicate()[0],'utf8').strip()
|
||||
pythonversion = StrictVersion(pythonversion)
|
||||
|
||||
|
||||
return ( pythonversion >=StrictVersion(minversion)
|
||||
and ( maxversion is None
|
||||
or pythonversion < StrictVersion(maxversion))
|
||||
@ -91,9 +91,9 @@ def is_a_virtualenv_python(path=None):
|
||||
|
||||
'''
|
||||
if path is None:
|
||||
rep = sys.base_exec_prefix == sys.exec_prefix
|
||||
rep = sys.base_exec_prefix != sys.exec_prefix
|
||||
else:
|
||||
command = """'%s' -c 'import sys; print(sys.base_exec_prefix == sys.exec_prefix)'""" % path
|
||||
command = """'%s' -c 'import sys; print(sys.base_exec_prefix != sys.exec_prefix)'""" % path
|
||||
p = subprocess.Popen(command,
|
||||
shell=True,
|
||||
stdout=subprocess.PIPE)
|
||||
|
@ -9,10 +9,10 @@ import sys
|
||||
import venv
|
||||
|
||||
from distutils.errors import DistutilsError
|
||||
from obidistutils.serenity.globals import local_virtualenv # @UnusedImport
|
||||
from obidistutils.serenity.checkpython import which_virtualenv,\
|
||||
is_python_version, \
|
||||
is_a_virtualenv_python
|
||||
from .globals import local_virtualenv # @UnusedImport
|
||||
from .checkpython import which_virtualenv,\
|
||||
is_python_version, \
|
||||
is_a_virtualenv_python
|
||||
|
||||
|
||||
|
||||
@ -39,6 +39,7 @@ def serenity_virtualenv(envname,package,version,minversion='3.4',maxversion=None
|
||||
maxversion=maxversion) and
|
||||
is_a_virtualenv_python(python))
|
||||
|
||||
|
||||
#
|
||||
# The virtualenv already exist but it is not ok
|
||||
#
|
||||
@ -58,7 +59,7 @@ def serenity_virtualenv(envname,package,version,minversion='3.4',maxversion=None
|
||||
clear=True,
|
||||
symlinks=False,
|
||||
with_pip=True)
|
||||
|
||||
|
||||
# check the newly created virtualenv
|
||||
return serenity_virtualenv(envname,package,version)
|
||||
|
||||
|
2
doc/.gitignore
vendored
2
doc/.gitignore
vendored
@ -1,3 +1,5 @@
|
||||
/build/
|
||||
/doxygen/
|
||||
/build_dir.txt
|
||||
/.DS_Store
|
||||
/.gitignore
|
||||
|
@ -57,7 +57,7 @@ html:
|
||||
@echo "Generating Doxygen documentation..."
|
||||
doxygen Doxyfile
|
||||
@echo "Doxygen documentation generated. \n"
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
$(SPHINXBUILD) -b html -c ./ $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
|
@ -1,16 +0,0 @@
|
||||
|
||||
__version__ = '4.0.0'
|
||||
|
||||
|
||||
def setup(app):
|
||||
|
||||
# We can't do the import at the module scope as setup.py has to be able to
|
||||
# import this file to read __version__ without hitting any syntax errors
|
||||
# from both Python 2 & Python 3.
|
||||
|
||||
# By the time this function is called, the directives code will have been
|
||||
# converted with 2to3 if appropriate
|
||||
|
||||
from . import directives
|
||||
|
||||
directives.setup(app)
|
Binary file not shown.
Binary file not shown.
@ -1,88 +0,0 @@
|
||||
|
||||
from ..renderer.rst.doxygen.base import RenderContext
|
||||
from ..renderer.rst.doxygen import format_parser_error
|
||||
from ..parser import ParserError, FileIOError
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.parsers import rst
|
||||
|
||||
|
||||
class WarningHandler(object):
|
||||
|
||||
def __init__(self, state, context):
|
||||
self.state = state
|
||||
self.context = context
|
||||
|
||||
def warn(self, raw_text, rendered_nodes=None):
|
||||
raw_text = self.format(raw_text)
|
||||
if rendered_nodes is None:
|
||||
rendered_nodes = [nodes.paragraph("", "", nodes.Text(raw_text))]
|
||||
return [
|
||||
nodes.warning("", *rendered_nodes),
|
||||
self.state.document.reporter.warning(raw_text, line=self.context['lineno'])
|
||||
]
|
||||
|
||||
def format(self, text):
|
||||
return text.format(**self.context)
|
||||
|
||||
|
||||
def create_warning(project_info, state, lineno, **kwargs):
|
||||
|
||||
tail = ''
|
||||
if project_info:
|
||||
tail = 'in doxygen xml output for project "{project}" from directory: {path}'.format(
|
||||
project=project_info.name(),
|
||||
path=project_info.project_path()
|
||||
)
|
||||
|
||||
context = dict(
|
||||
lineno=lineno,
|
||||
tail=tail,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
return WarningHandler(state, context)
|
||||
|
||||
|
||||
class BaseDirective(rst.Directive):
|
||||
|
||||
def __init__(self, root_data_object, renderer_factory_creator_constructor, finder_factory,
|
||||
project_info_factory, filter_factory, target_handler_factory, parser_factory, *args):
|
||||
rst.Directive.__init__(self, *args)
|
||||
self.directive_args = list(args) # Convert tuple to list to allow modification.
|
||||
|
||||
self.root_data_object = root_data_object
|
||||
self.renderer_factory_creator_constructor = renderer_factory_creator_constructor
|
||||
self.finder_factory = finder_factory
|
||||
self.project_info_factory = project_info_factory
|
||||
self.filter_factory = filter_factory
|
||||
self.target_handler_factory = target_handler_factory
|
||||
self.parser_factory = parser_factory
|
||||
|
||||
def render(self, node_stack, project_info, options, filter_, target_handler, mask_factory):
|
||||
"Standard render process used by subclasses"
|
||||
|
||||
renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
|
||||
project_info,
|
||||
self.state.document,
|
||||
options,
|
||||
target_handler
|
||||
)
|
||||
|
||||
try:
|
||||
renderer_factory = renderer_factory_creator.create_factory(
|
||||
node_stack,
|
||||
self.state,
|
||||
self.state.document,
|
||||
filter_,
|
||||
target_handler,
|
||||
)
|
||||
except ParserError as e:
|
||||
return format_parser_error("doxygenclass", e.error, e.filename, self.state,
|
||||
self.lineno, True)
|
||||
except FileIOError as e:
|
||||
return format_parser_error("doxygenclass", e.error, e.filename, self.state, self.lineno)
|
||||
|
||||
context = RenderContext(node_stack, mask_factory, self.directive_args)
|
||||
object_renderer = renderer_factory.create_renderer(context)
|
||||
return object_renderer.render()
|
Binary file not shown.
@ -1,123 +0,0 @@
|
||||
|
||||
from ..renderer.rst.doxygen.base import RenderContext
|
||||
from ..renderer.rst.doxygen.mask import NullMaskFactory
|
||||
from ..directive.base import BaseDirective
|
||||
from ..project import ProjectError
|
||||
from .base import create_warning
|
||||
|
||||
from docutils.parsers.rst.directives import unchanged_required, flag
|
||||
from docutils.parsers import rst
|
||||
|
||||
|
||||
class BaseFileDirective(BaseDirective):
|
||||
"""Base class handle the main work when given the appropriate file and project info to work
|
||||
from.
|
||||
"""
|
||||
|
||||
# We use inheritance here rather than a separate object and composition, because so much
|
||||
# information is present in the Directive class from the docutils framework that we'd have to
|
||||
# pass way too much stuff to a helper object to be reasonable.
|
||||
|
||||
def handle_contents(self, file_, project_info):
|
||||
|
||||
finder = self.finder_factory.create_finder(project_info)
|
||||
|
||||
finder_filter = self.filter_factory.create_file_finder_filter(file_)
|
||||
|
||||
matches = []
|
||||
finder.filter_(finder_filter, matches)
|
||||
|
||||
if len(matches) > 1:
|
||||
warning = create_warning(None, self.state, self.lineno, file=file_,
|
||||
directivename=self.directive_name)
|
||||
return warning.warn('{directivename}: Found multiple matches for file "{file} {tail}')
|
||||
|
||||
elif not matches:
|
||||
warning = create_warning(None, self.state, self.lineno, file=file_,
|
||||
directivename=self.directive_name)
|
||||
return warning.warn('{directivename}: Cannot find file "{file} {tail}')
|
||||
|
||||
target_handler = self.target_handler_factory.create_target_handler(
|
||||
self.options, project_info, self.state.document)
|
||||
filter_ = self.filter_factory.create_file_filter(file_, self.options)
|
||||
|
||||
renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
|
||||
project_info,
|
||||
self.state.document,
|
||||
self.options,
|
||||
target_handler
|
||||
)
|
||||
node_list = []
|
||||
for node_stack in matches:
|
||||
|
||||
renderer_factory = renderer_factory_creator.create_factory(
|
||||
node_stack,
|
||||
self.state,
|
||||
self.state.document,
|
||||
filter_,
|
||||
target_handler,
|
||||
)
|
||||
|
||||
mask_factory = NullMaskFactory()
|
||||
context = RenderContext(node_stack, mask_factory, self.directive_args)
|
||||
object_renderer = renderer_factory.create_renderer(context)
|
||||
node_list.extend(object_renderer.render())
|
||||
|
||||
return node_list
|
||||
|
||||
|
||||
class DoxygenFileDirective(BaseFileDirective):
|
||||
|
||||
directive_name = 'doxygenfile'
|
||||
|
||||
required_arguments = 0
|
||||
optional_arguments = 2
|
||||
option_spec = {
|
||||
"path": unchanged_required,
|
||||
"project": unchanged_required,
|
||||
"outline": flag,
|
||||
"no-link": flag,
|
||||
}
|
||||
has_content = False
|
||||
|
||||
def run(self):
|
||||
"""Get the file from the argument and the project info from the factory."""
|
||||
|
||||
file_ = self.arguments[0]
|
||||
|
||||
try:
|
||||
project_info = self.project_info_factory.create_project_info(self.options)
|
||||
except ProjectError as e:
|
||||
warning = create_warning(None, self.state, self.lineno)
|
||||
return warning.warn('doxygenfile: %s' % e)
|
||||
|
||||
return self.handle_contents(file_, project_info)
|
||||
|
||||
|
||||
class AutoDoxygenFileDirective(BaseFileDirective):
|
||||
|
||||
directive_name = 'autodoxygenfile'
|
||||
|
||||
required_arguments = 1
|
||||
option_spec = {
|
||||
"project": unchanged_required,
|
||||
"outline": flag,
|
||||
"no-link": flag,
|
||||
}
|
||||
has_content = False
|
||||
|
||||
def run(self):
|
||||
"""Get the file from the argument and extract the associated project info for the named
|
||||
project given that it is an auto-project.
|
||||
"""
|
||||
|
||||
file_ = self.arguments[0]
|
||||
|
||||
try:
|
||||
project_info = self.project_info_factory.retrieve_project_info_for_auto(self.options)
|
||||
except ProjectError as e:
|
||||
warning = create_warning(None, self.state, self.lineno)
|
||||
return warning.warn('autodoxygenfile: %s' % e)
|
||||
|
||||
return self.handle_contents(file_, project_info)
|
||||
|
Binary file not shown.
@ -1,115 +0,0 @@
|
||||
|
||||
from ..renderer.rst.doxygen.base import RenderContext
|
||||
from ..renderer.rst.doxygen.mask import NullMaskFactory
|
||||
from ..renderer.rst.doxygen import format_parser_error
|
||||
from ..directive.base import BaseDirective
|
||||
from ..project import ProjectError
|
||||
from ..parser import ParserError, FileIOError
|
||||
from .base import create_warning
|
||||
|
||||
from docutils.parsers import rst
|
||||
from docutils.parsers.rst.directives import unchanged_required, flag
|
||||
|
||||
|
||||
class BaseIndexDirective(BaseDirective):
    """Base class handle the main work when given the appropriate project info to work from.
    """

    # We use inheritance here rather than a separate object and composition, because so much
    # information is present in the Directive class from the docutils framework that we'd have to
    # pass way too much stuff to a helper object to be reasonable.

    def handle_contents(self, project_info):
        # Render the index of *project_info* into a list of docutils nodes.
        #
        # Pipeline: parse the project (finder), build target handler + filter,
        # construct a renderer for the root data object, render. Parser
        # failures at either stage are converted into error nodes via
        # format_parser_error rather than raised.

        try:
            finder = self.finder_factory.create_finder(project_info)
        except ParserError as e:
            return format_parser_error(self.name, e.error, e.filename, self.state,
                                       self.lineno, True)
        except FileIOError as e:
            return format_parser_error(self.name, e.error, e.filename, self.state, self.lineno)

        data_object = finder.root()

        target_handler = self.target_handler_factory.create_target_handler(
            self.options, project_info, self.state.document)
        filter_ = self.filter_factory.create_index_filter(self.options)

        renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
            project_info,
            self.state.document,
            self.options,
            target_handler
        )
        renderer_factory = renderer_factory_creator.create_factory(
            [data_object],
            self.state,
            self.state.document,
            filter_,
            target_handler,
        )

        # No masking is applied when rendering an index.
        mask_factory = NullMaskFactory()
        context = RenderContext([data_object, self.root_data_object], mask_factory, self.directive_args)
        object_renderer = renderer_factory.create_renderer(context)

        # render() may itself parse further xml files, so the same error
        # handling is needed again here.
        try:
            node_list = object_renderer.render()
        except ParserError as e:
            return format_parser_error(self.name, e.error, e.filename, self.state,
                                       self.lineno, True)
        except FileIOError as e:
            return format_parser_error(self.name, e.error, e.filename, self.state, self.lineno)

        return node_list
|
||||
|
||||
|
||||
class DoxygenIndexDirective(BaseIndexDirective):
    """Directive rendering the complete index of a configured project."""

    required_arguments = 0
    optional_arguments = 2
    option_spec = {
        "path": unchanged_required,
        "project": unchanged_required,
        "outline": flag,
        "no-link": flag,
    }
    has_content = False

    def run(self):
        """Extract the project info and pass it to the helper method"""
        try:
            project_info = self.project_info_factory.create_project_info(self.options)
        except ProjectError as err:
            return create_warning(None, self.state, self.lineno).warn(
                'doxygenindex: %s' % err)

        return self.handle_contents(project_info)
|
||||
|
||||
|
||||
class AutoDoxygenIndexDirective(BaseIndexDirective):
    """Directive rendering the complete index of an auto-configured project."""

    required_arguments = 0
    final_argument_whitespace = True
    option_spec = {
        "project": unchanged_required,
        "outline": flag,
        "no-link": flag,
    }
    has_content = False

    def run(self):
        """Fetch the auto-project info from the store and render its index.

        Returns a warning node list when the project cannot be resolved.
        """
        try:
            project_info = self.project_info_factory.retrieve_project_info_for_auto(self.options)
        except ProjectError as err:
            return create_warning(None, self.state, self.lineno).warn(
                'autodoxygenindex: %s' % err)

        return self.handle_contents(project_info)
|
||||
|
||||
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -1,3 +0,0 @@
|
||||
|
||||
class BreatheError(Exception):
    """Root of the exception hierarchy used by this package."""
|
Binary file not shown.
Binary file not shown.
@ -1,45 +0,0 @@
|
||||
|
||||
class FakeParentNode(object):
    """Placeholder ancestor passed as the parent of the tree root.

    Lets root-level filtering use the same ancestor-stack shape as every
    other node.
    """

    node_type = "fakeparent"
|
||||
|
||||
|
||||
class Finder(object):
    """Entry point for walking a parsed node tree with a filter."""

    def __init__(self, root, item_finder_factory):
        self._root = root
        self.item_finder_factory = item_finder_factory

    def filter_(self, filter_, matches):
        """Adds all nodes which match the filter into the matches list"""
        # Start from the root with a fake parent so the ancestor stack is
        # never empty.
        root_finder = self.item_finder_factory.create_finder(self._root)
        root_finder.filter_([FakeParentNode()], filter_, matches)

    def root(self):
        """Return the root data object this finder was built around."""
        return self._root
|
||||
|
||||
|
||||
class FinderFactory(object):
    """Builds Finder objects wired with per-project item finder factories."""

    def __init__(self, parser, item_finder_factory_creator):
        self.parser = parser
        self.item_finder_factory_creator = item_finder_factory_creator

    def create_finder(self, project_info):
        """Parse the project and return a Finder over the resulting root."""
        root = self.parser.parse(project_info)
        factory = self.item_finder_factory_creator.create_factory(project_info)
        return Finder(root, factory)

    def create_finder_from_root(self, root, project_info):
        """Return a Finder over an already-parsed *root* node."""
        factory = self.item_finder_factory_creator.create_factory(project_info)
        return Finder(root, factory)
|
||||
|
||||
|
Binary file not shown.
@ -1 +0,0 @@
|
||||
|
Binary file not shown.
@ -1,18 +0,0 @@
|
||||
|
||||
class ItemFinder(object):
    """Base class for per-node-type finders; stores the node and its context."""

    def __init__(self, project_info, data_object, item_finder_factory):
        self.project_info = project_info
        self.data_object = data_object
        self.item_finder_factory = item_finder_factory
|
||||
|
||||
|
||||
def stack(element, list_):
    """Stack an element on to the start of a list and return as a new list"""
    # Build a brand new list so the caller's list is left untouched.
    return [element] + list_
|
||||
|
Binary file not shown.
@ -1,75 +0,0 @@
|
||||
|
||||
from .base import ItemFinder, stack
|
||||
|
||||
|
||||
class DoxygenTypeSubItemFinder(ItemFinder):

    def filter_(self, ancestors, filter_, matches):
        """Find nodes which match the filter. Doesn't test this node, only its children"""
        node_stack = stack(self.data_object, ancestors)

        # A doxygendef node has a single compounddef child; delegate to it.
        child_finder = self.item_finder_factory.create_finder(self.data_object.compounddef)
        child_finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class CompoundDefTypeSubItemFinder(ItemFinder):

    def filter_(self, ancestors, filter_, matches):
        """Finds nodes which match the filter and continues checks to children"""
        node_stack = stack(self.data_object, ancestors)

        # Test this node itself first.
        if filter_.allow(node_stack):
            matches.append(node_stack)

        # Then recurse into both kinds of children: sections, nested classes.
        for section in self.data_object.sectiondef:
            self.item_finder_factory.create_finder(section).filter_(
                node_stack, filter_, matches)

        for inner in self.data_object.innerclass:
            self.item_finder_factory.create_finder(inner).filter_(
                node_stack, filter_, matches)
|
||||
|
||||
|
||||
class SectionDefTypeSubItemFinder(ItemFinder):

    def filter_(self, ancestors, filter_, matches):
        """Test this node against the filter, then check the memberdef children.

        (The code does test this node itself, unlike what the original
        docstring claimed.)
        """
        node_stack = stack(self.data_object, ancestors)

        if filter_.allow(node_stack):
            matches.append(node_stack)

        for member in self.data_object.memberdef:
            member_finder = self.item_finder_factory.create_finder(member)
            member_finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class MemberDefTypeSubItemFinder(ItemFinder):

    def filter_(self, ancestors, filter_, matches):
        """Test this member against the filter; for enums, also test each value."""
        node_stack = stack(self.data_object, ancestors)

        if filter_.allow(node_stack):
            matches.append(node_stack)

        # Enum values are leaves stacked directly on top of the enum member.
        if self.data_object.kind == 'enum':
            for enum_value in self.data_object.enumvalue:
                value_stack = stack(enum_value, node_stack)
                if filter_.allow(value_stack):
                    matches.append(value_stack)
|
||||
|
||||
|
||||
class RefTypeSubItemFinder(ItemFinder):

    def filter_(self, ancestors, filter_, matches):
        """Test only this ref node against the filter; no recursion into children."""
        node_stack = stack(self.data_object, ancestors)
        if filter_.allow(node_stack):
            matches.append(node_stack)
|
||||
|
Binary file not shown.
@ -1,55 +0,0 @@
|
||||
|
||||
from . import index as indexfinder
|
||||
from . import compound as compoundfinder
|
||||
|
||||
|
||||
class CreateCompoundTypeSubFinder(object):
    """Factory callable that builds CompoundTypeSubItemFinder instances.

    Needed because that finder requires a per-project compound parser in
    addition to the standard ItemFinder arguments.
    """

    def __init__(self, parser_factory, matcher_factory):
        self.parser_factory = parser_factory
        self.matcher_factory = matcher_factory

    def __call__(self, project_info, *args):
        compound_parser = self.parser_factory.create_compound_parser(project_info)
        return indexfinder.CompoundTypeSubItemFinder(self.matcher_factory, compound_parser,
                                                    project_info, *args)
|
||||
|
||||
|
||||
class DoxygenItemFinderFactory(object):
    """Creates the finder matching a data object's node_type."""

    def __init__(self, finders, project_info):
        self.finders = finders
        self.project_info = project_info

    def create_finder(self, data_object):
        """Dispatch on node_type and instantiate the corresponding finder."""
        finder_cls = self.finders[data_object.node_type]
        return finder_cls(self.project_info, data_object, self)
|
||||
|
||||
|
||||
class DoxygenItemFinderFactoryCreator(object):
    """Builds a DoxygenItemFinderFactory for a given project."""

    def __init__(self, parser_factory, filter_factory):
        self.parser_factory = parser_factory
        self.filter_factory = filter_factory

    def create_factory(self, project_info):
        """Map every known node_type to its finder and wrap them in a factory."""
        finders = {
            "doxygen": indexfinder.DoxygenTypeSubItemFinder,
            "compound": CreateCompoundTypeSubFinder(self.parser_factory, self.filter_factory),
            "member": indexfinder.MemberTypeSubItemFinder,
            "doxygendef": compoundfinder.DoxygenTypeSubItemFinder,
            "compounddef": compoundfinder.CompoundDefTypeSubItemFinder,
            "sectiondef": compoundfinder.SectionDefTypeSubItemFinder,
            "memberdef": compoundfinder.MemberDefTypeSubItemFinder,
            "ref": compoundfinder.RefTypeSubItemFinder,
        }
        return DoxygenItemFinderFactory(finders, project_info)
|
||||
|
||||
|
||||
|
Binary file not shown.
@ -1,79 +0,0 @@
|
||||
|
||||
from .base import ItemFinder, stack
|
||||
|
||||
|
||||
class DoxygenTypeSubItemFinder(ItemFinder):

    def filter_(self, ancestors, filter_, matches):
        """Find nodes which match the filter. Doesn't test this node, only its children"""
        node_stack = stack(self.data_object, ancestors)

        # Visit every compound entry listed in the index.
        for compound in self.data_object.get_compound():
            finder = self.item_finder_factory.create_finder(compound)
            finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class CompoundTypeSubItemFinder(ItemFinder):
    """Finder for index-level compound entries.

    Needs a compound parser because descending past this node means opening
    the compound's own xml file.
    """

    def __init__(self, filter_factory, compound_parser, *args):
        ItemFinder.__init__(self, *args)

        self.filter_factory = filter_factory
        self.compound_parser = compound_parser

    def filter_(self, ancestors, filter_, matches):
        """Finds nodes which match the filter and continues checks to children

        Requires parsing the xml files referenced by the children for which we use the compound
        parser and continue at the top level of that pretending that this node is the parent of the
        top level node of the compound file.
        """
        node_stack = stack(self.data_object, ancestors)

        # Match against compound object
        if filter_.allow(node_stack):
            matches.append(node_stack)

        # Descend to member children
        member_matches = []
        for member in self.data_object.get_member():
            member_finder = self.item_finder_factory.create_finder(member)
            member_finder.filter_(node_stack, filter_, member_matches)

        # Both branches below need the compound file parsed, so parse it once
        # here. (This was previously duplicated in each branch; an unused
        # 'results' local has also been removed.)
        file_data = self.compound_parser.parse(self.data_object.refid)
        finder = self.item_finder_factory.create_finder(file_data)

        if member_matches:
            # Members matched: re-locate each one inside the full compound
            # file by refid so the match carries the complete definition
            # rather than the thin index entry.
            for member_stack in member_matches:
                ref_filter = self.filter_factory.create_id_filter('memberdef', member_stack[0].refid)
                finder.filter_(node_stack, ref_filter, matches)
        else:
            # No member matched: descend into the compound file with the
            # original filter.
            finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class MemberTypeSubItemFinder(ItemFinder):

    def filter_(self, ancestors, filter_, matches):
        """Test only this member node against the filter; members are leaves here."""
        node_stack = stack(self.data_object, ancestors)
        if filter_.allow(node_stack):
            matches.append(node_stack)
|
Binary file not shown.
@ -1,118 +0,0 @@
|
||||
|
||||
import breathe.parser.doxygen.index
|
||||
import breathe.parser.doxygen.compound
|
||||
|
||||
class ParserError(Exception):
    """Raised when a doxygen xml file cannot be parsed.

    Carries the underlying error and the offending filename.
    """

    def __init__(self, error, filename):
        Exception.__init__(self, error)
        self.error = error
        self.filename = filename
|
||||
|
||||
class FileIOError(Exception):
    """Raised when a doxygen xml file cannot be read.

    Carries the underlying error and the offending filename.
    """

    def __init__(self, error, filename):
        Exception.__init__(self, error)
        self.error = error
        self.filename = filename
|
||||
|
||||
class Parser(object):
    """Common state shared by the doxygen xml parsers."""

    def __init__(self, cache, path_handler, file_state_cache):
        self.cache = cache
        self.path_handler = path_handler
        self.file_state_cache = file_state_cache
|
||||
|
||||
class DoxygenIndexParser(Parser):
    """Parses a project's top-level index.xml, caching results by filename.

    The previous explicit __init__ only delegated to Parser.__init__ with
    identical arguments, so it has been removed; the constructor is inherited.
    """

    def parse(self, project_info):
        """Return the parsed index.xml document for *project_info*.

        Raises ParserError when the xml is malformed and FileIOError when
        the file cannot be read.
        """
        filename = self.path_handler.resolve_path(
            project_info.project_path(),
            "index.xml"
        )

        # NOTE(review): presumably registers the file for change tracking
        # between builds -- confirm against file_state_cache's contract.
        self.file_state_cache.update(filename)

        try:
            # Try to get from our cache
            return self.cache[filename]
        except KeyError:
            # If that fails, parse it afresh
            try:
                result = breathe.parser.doxygen.index.parse(filename)
                self.cache[filename] = result
                return result
            except breathe.parser.doxygen.index.ParseError as e:
                raise ParserError(e, filename)
            except breathe.parser.doxygen.index.FileIOError as e:
                raise FileIOError(e, filename)
|
||||
|
||||
class DoxygenCompoundParser(Parser):
    # Parses the per-compound "<refid>.xml" files of a single project,
    # caching parsed documents by resolved filename.

    def __init__(self, cache, path_handler, file_state_cache, project_info):
        Parser.__init__(self, cache, path_handler, file_state_cache)

        # Project whose directory refid filenames are resolved against.
        self.project_info = project_info

    def parse(self, refid):
        # Return the parsed document for "<refid>.xml".
        #
        # Raises ParserError when the xml is malformed and FileIOError when
        # the file cannot be read.

        filename = self.path_handler.resolve_path(
            self.project_info.project_path(),
            "%s.xml" % refid
        )

        # NOTE(review): presumably registers the file for change tracking
        # between builds -- confirm against file_state_cache's contract.
        self.file_state_cache.update(filename)

        try:
            # Try to get from our cache
            return self.cache[filename]
        except KeyError:

            # If that fails, parse it afresh
            try:
                result = breathe.parser.doxygen.compound.parse(filename)
                self.cache[filename] = result
                return result
            except breathe.parser.doxygen.compound.ParseError as e:
                raise ParserError(e, filename)
            except breathe.parser.doxygen.compound.FileIOError as e:
                raise FileIOError(e, filename)
|
||||
|
||||
class CacheFactory(object):
    """Builds the cache object shared by the xml parsers."""

    def create_cache(self):
        """Return a fresh, empty mapping to be used as a parse cache."""
        # A plain dict suffices: filenames map to parsed documents.
        return {}
|
||||
|
||||
class DoxygenParserFactory(object):
    """Creates index and compound parsers sharing one cache and path handler."""

    def __init__(self, cache, path_handler, file_state_cache):
        self.cache = cache
        self.path_handler = path_handler
        self.file_state_cache = file_state_cache

    def create_index_parser(self):
        """Return a parser for the project-level index.xml file."""
        return DoxygenIndexParser(self.cache, self.path_handler, self.file_state_cache)

    def create_compound_parser(self, project_info):
        """Return a parser for the per-compound xml files of *project_info*."""
        return DoxygenCompoundParser(
            self.cache,
            self.path_handler,
            self.file_state_cache,
            project_info
        )
|
||||
|
||||
|
||||
|
Binary file not shown.
Binary file not shown.
@ -1,964 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Generated Mon Feb 9 19:08:05 2009 by generateDS.py.
|
||||
"""
|
||||
|
||||
from xml.dom import minidom
|
||||
from xml.dom import Node
|
||||
from xml.parsers.expat import ExpatError
|
||||
|
||||
from . import compoundsuper as supermod
|
||||
from .compoundsuper import MixedContainer
|
||||
|
||||
|
||||
class DoxygenTypeSub(supermod.DoxygenType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "doxygendef"

    def __init__(self, version=None, compounddef=None):
        supermod.DoxygenType.__init__(self, version, compounddef)
# Register so the superclass factory instantiates this subclass.
supermod.DoxygenType.subclass = DoxygenTypeSub
# end class DoxygenTypeSub
|
||||
|
||||
|
||||
class compounddefTypeSub(supermod.compounddefType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "compounddef"

    def __init__(self, kind=None, prot=None, id=None, compoundname='', title='',
                 basecompoundref=None, derivedcompoundref=None, includes=None, includedby=None,
                 incdepgraph=None, invincdepgraph=None, innerdir=None, innerfile=None,
                 innerclass=None, innernamespace=None, innerpage=None, innergroup=None,
                 templateparamlist=None, sectiondef=None, briefdescription=None,
                 detaileddescription=None, inheritancegraph=None, collaborationgraph=None,
                 programlisting=None, location=None, listofallmembers=None):

        # All arguments are forwarded unchanged to the generated superclass.
        supermod.compounddefType.__init__(self, kind, prot, id, compoundname, title,
                                          basecompoundref, derivedcompoundref, includes, includedby,
                                          incdepgraph, invincdepgraph, innerdir, innerfile,
                                          innerclass, innernamespace, innerpage, innergroup,
                                          templateparamlist, sectiondef, briefdescription,
                                          detaileddescription, inheritancegraph, collaborationgraph,
                                          programlisting, location, listofallmembers)

supermod.compounddefType.subclass = compounddefTypeSub
# end class compounddefTypeSub
|
||||
|
||||
|
||||
class listofallmembersTypeSub(supermod.listofallmembersType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "listofallmembers"

    def __init__(self, member=None):
        supermod.listofallmembersType.__init__(self, member)
supermod.listofallmembersType.subclass = listofallmembersTypeSub
# end class listofallmembersTypeSub
|
||||
|
||||
|
||||
class memberRefTypeSub(supermod.memberRefType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "memberref"

    def __init__(self, virt=None, prot=None, refid=None, ambiguityscope=None, scope='', name=''):
        supermod.memberRefType.__init__(self, virt, prot, refid, ambiguityscope, scope, name)
supermod.memberRefType.subclass = memberRefTypeSub
# end class memberRefTypeSub
|
||||
|
||||
|
||||
class compoundRefTypeSub(supermod.compoundRefType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "compoundref"

    def __init__(self, virt=None, prot=None, refid=None, valueOf_='', mixedclass_=None,
                 content_=None):
        # Only the mixed-content arguments are forwarded; virt/prot/refid/
        # valueOf_ are accepted for signature compatibility but ignored here.
        supermod.compoundRefType.__init__(self, mixedclass_, content_)
supermod.compoundRefType.subclass = compoundRefTypeSub
# end class compoundRefTypeSub
|
||||
|
||||
|
||||
class reimplementTypeSub(supermod.reimplementType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "reimplement"

    def __init__(self, refid=None, valueOf_='', mixedclass_=None, content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.reimplementType.__init__(self, mixedclass_, content_)
supermod.reimplementType.subclass = reimplementTypeSub
# end class reimplementTypeSub
|
||||
|
||||
|
||||
class incTypeSub(supermod.incType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "inc"

    def __init__(self, local=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.incType.__init__(self, mixedclass_, content_)
supermod.incType.subclass = incTypeSub
# end class incTypeSub
|
||||
|
||||
|
||||
class refTypeSub(supermod.refType):
    """Generated-schema subclass; node_type tags the node for finder dispatch.

    Unlike its siblings this one takes a required *node_name* which it
    stores on the instance.
    """

    node_type = "ref"

    def __init__(self, node_name, prot=None, refid=None, valueOf_='', mixedclass_=None,
                 content_=None):
        supermod.refType.__init__(self, mixedclass_, content_)

        # Name of the element this ref appeared as (stored, not forwarded).
        self.node_name = node_name

supermod.refType.subclass = refTypeSub
|
||||
|
||||
|
||||
class refTextTypeSub(supermod.refTextType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    # NOTE(review): "reftex" looks like a typo for "reftext"; left unchanged
    # because dispatch elsewhere may key on this exact string -- confirm.
    node_type = "reftex"

    def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None,
                 content_=None):
        supermod.refTextType.__init__(self, mixedclass_, content_)

supermod.refTextType.subclass = refTextTypeSub
# end class refTextTypeSub
|
||||
|
||||
|
||||
class sectiondefTypeSub(supermod.sectiondefType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "sectiondef"

    def __init__(self, kind=None, header='', description=None, memberdef=None):
        supermod.sectiondefType.__init__(self, kind, header, description, memberdef)

supermod.sectiondefType.subclass = sectiondefTypeSub
# end class sectiondefTypeSub
|
||||
|
||||
|
||||
class memberdefTypeSub(supermod.memberdefType):
    """Generated-schema subclass for member definitions.

    Extends the generated build step so that parameter descriptions written
    inline with each <param> are collected into a parameter list and spliced
    into the detailed description.
    """

    node_type = "memberdef"

    def __init__(self, initonly=None, kind=None, volatile=None, const=None, raise_=None, virt=None,
                 readable=None, prot=None, explicit=None, new=None, final=None, writable=None,
                 add=None, static=None, remove=None, sealed=None, mutable=None, gettable=None,
                 inline=None, settable=None, id=None, templateparamlist=None, type_=None,
                 definition='', argsstring='', name='', read='', write='', bitfield='',
                 reimplements=None, reimplementedby=None, param=None, enumvalue=None,
                 initializer=None, exceptions=None, briefdescription=None, detaileddescription=None,
                 inbodydescription=None, location=None, references=None, referencedby=None):

        supermod.memberdefType.__init__(self, initonly, kind, volatile, const, raise_, virt,
                                        readable, prot, explicit, new, final, writable, add, static,
                                        remove, sealed, mutable, gettable, inline, settable, id,
                                        templateparamlist, type_, definition, argsstring, name,
                                        read, write, bitfield, reimplements, reimplementedby, param,
                                        enumvalue, initializer, exceptions, briefdescription,
                                        detaileddescription, inbodydescription, location,
                                        references, referencedby)

        # Accumulates one entry per documented <param> while children are
        # being built; later inserted into the detailed description.
        self.parameterlist = supermod.docParamListType.factory()
        self.parameterlist.kind = "param"

    def buildChildren(self, child_, nodeName_):
        """Build a child node, additionally harvesting inline param descriptions."""
        supermod.memberdefType.buildChildren(self, child_, nodeName_)

        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'param':

            # Get latest param
            param = self.param[-1]

            # If it doesn't have a description we're done
            if not param.briefdescription:
                return

            # Construct our own param list from the descriptions stored inline
            # with the parameters
            paramdescription = param.briefdescription
            paramname = supermod.docParamName.factory()

            # Add parameter name
            obj_ = paramname.mixedclass_(MixedContainer.CategoryText, MixedContainer.TypeNone, '',
                                         param.declname)
            paramname.content_.append(obj_)

            paramnamelist = supermod.docParamNameList.factory()
            paramnamelist.parametername.append(paramname)

            paramlistitem = supermod.docParamListItem.factory()
            paramlistitem.parameternamelist.append(paramnamelist)

            # Add parameter description
            paramlistitem.parameterdescription = paramdescription

            self.parameterlist.parameteritem.append(paramlistitem)

        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'detaileddescription':

            if not self.parameterlist.parameteritem:
                # No items in our list
                return

            # Assume supermod.memberdefType.buildChildren has already built the
            # description object, we just want to slot our parameterlist in at
            # a reasonable point

            if not self.detaileddescription:
                # Create one if it doesn't exist
                self.detaileddescription = supermod.descriptionType.factory()

            detaileddescription = self.detaileddescription

            para = supermod.docParaType.factory()
            para.parameterlist.append(self.parameterlist)

            obj_ = detaileddescription.mixedclass_(MixedContainer.CategoryComplex,
                                                   MixedContainer.TypeNone, 'para', para)

            # Insert at the front of the description's mixed content.
            index = 0
            detaileddescription.content_.insert(index, obj_)


supermod.memberdefType.subclass = memberdefTypeSub
# end class memberdefTypeSub
|
||||
|
||||
|
||||
class descriptionTypeSub(supermod.descriptionType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "description"

    def __init__(self, title='', para=None, sect1=None, internal=None, mixedclass_=None,
                 content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.descriptionType.__init__(self, mixedclass_, content_)

supermod.descriptionType.subclass = descriptionTypeSub
# end class descriptionTypeSub
|
||||
|
||||
|
||||
class enumvalueTypeSub(supermod.enumvalueType):
    """Generated-schema subclass for enum values with custom child building.

    Overrides buildChildren entirely (it does not call the superclass
    version) to populate name, descriptions and initializer itself.
    """

    node_type = "enumvalue"

    def __init__(self, prot=None, id=None, name='', initializer=None, briefdescription=None,
                 detaileddescription=None, mixedclass_=None, content_=None):
        supermod.enumvalueType.__init__(self, mixedclass_, content_)

        # Reset; populated by buildChildren when an <initializer> is seen.
        self.initializer = None

    def buildChildren(self, child_, nodeName_):
        """Populate this node from one xml child element."""
        # Get text from <name> child and put it in self.name
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'name':
            value_ = []
            for text_ in child_.childNodes:
                value_.append(text_.nodeValue)
            valuestr_ = ''.join(value_)
            self.name = valuestr_
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'briefdescription':
            obj_ = supermod.descriptionType.factory()
            obj_.build(child_)
            self.set_briefdescription(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'detaileddescription':
            obj_ = supermod.descriptionType.factory()
            obj_.build(child_)
            self.set_detaileddescription(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'initializer':
            childobj_ = supermod.linkedTextType.factory()
            childobj_.build(child_)
            # Wrap in a mixed container and record in both the initializer
            # slot and the mixed content list.
            obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone,
                                    'initializer', childobj_)
            self.set_initializer(obj_)
            self.content_.append(obj_)

supermod.enumvalueType.subclass = enumvalueTypeSub
# end class enumvalueTypeSub
|
||||
|
||||
|
||||
class templateparamlistTypeSub(supermod.templateparamlistType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "templateparamlist"

    def __init__(self, param=None):
        supermod.templateparamlistType.__init__(self, param)
supermod.templateparamlistType.subclass = templateparamlistTypeSub
# end class templateparamlistTypeSub
|
||||
|
||||
|
||||
class paramTypeSub(supermod.paramType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "param"

    def __init__(self, type_=None, declname='', defname='', array='', defval=None,
                 briefdescription=None):
        supermod.paramType.__init__(self, type_, declname, defname, array, defval, briefdescription)
supermod.paramType.subclass = paramTypeSub
# end class paramTypeSub
|
||||
|
||||
|
||||
class linkedTextTypeSub(supermod.linkedTextType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "linkedtext"

    def __init__(self, ref=None, mixedclass_=None, content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.linkedTextType.__init__(self, mixedclass_, content_)
supermod.linkedTextType.subclass = linkedTextTypeSub
# end class linkedTextTypeSub
|
||||
|
||||
|
||||
class graphTypeSub(supermod.graphType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "graph"

    def __init__(self, node=None):
        supermod.graphType.__init__(self, node)
supermod.graphType.subclass = graphTypeSub
# end class graphTypeSub
|
||||
|
||||
|
||||
class nodeTypeSub(supermod.nodeType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "node"

    def __init__(self, id=None, label='', link=None, childnode=None):
        supermod.nodeType.__init__(self, id, label, link, childnode)
supermod.nodeType.subclass = nodeTypeSub
# end class nodeTypeSub
|
||||
|
||||
|
||||
class childnodeTypeSub(supermod.childnodeType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "childnode"

    def __init__(self, relation=None, refid=None, edgelabel=None):
        supermod.childnodeType.__init__(self, relation, refid, edgelabel)
supermod.childnodeType.subclass = childnodeTypeSub
# end class childnodeTypeSub
|
||||
|
||||
|
||||
class linkTypeSub(supermod.linkType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "link"

    def __init__(self, refid=None, external=None, valueOf_=''):
        # valueOf_ is accepted for signature compatibility but not forwarded.
        supermod.linkType.__init__(self, refid, external)
supermod.linkType.subclass = linkTypeSub
# end class linkTypeSub
|
||||
|
||||
|
||||
class listingTypeSub(supermod.listingType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "listing"

    def __init__(self, codeline=None):
        supermod.listingType.__init__(self, codeline)
supermod.listingType.subclass = listingTypeSub
# end class listingTypeSub
|
||||
|
||||
|
||||
class codelineTypeSub(supermod.codelineType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "codeline"

    def __init__(self, external=None, lineno=None, refkind=None, refid=None, highlight=None):
        supermod.codelineType.__init__(self, external, lineno, refkind, refid, highlight)
supermod.codelineType.subclass = codelineTypeSub
# end class codelineTypeSub
|
||||
|
||||
|
||||
class highlightTypeSub(supermod.highlightType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "highlight"

    def __init__(self, class_=None, sp=None, ref=None, mixedclass_=None, content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.highlightType.__init__(self, mixedclass_, content_)
supermod.highlightType.subclass = highlightTypeSub
# end class highlightTypeSub
|
||||
|
||||
|
||||
class referenceTypeSub(supermod.referenceType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "reference"

    def __init__(self, endline=None, startline=None, refid=None, compoundref=None, valueOf_='',
                 mixedclass_=None, content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.referenceType.__init__(self, mixedclass_, content_)
supermod.referenceType.subclass = referenceTypeSub
# end class referenceTypeSub
|
||||
|
||||
|
||||
class locationTypeSub(supermod.locationType):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "location"

    def __init__(self, bodystart=None, line=None, bodyend=None, bodyfile=None, file=None,
                 valueOf_=''):
        # valueOf_ is accepted for signature compatibility but not forwarded.
        supermod.locationType.__init__(self, bodystart, line, bodyend, bodyfile, file)
supermod.locationType.subclass = locationTypeSub
# end class locationTypeSub
|
||||
|
||||
|
||||
class docSect1TypeSub(supermod.docSect1Type):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "docsect1"

    def __init__(self, id=None, title='', para=None, sect2=None, internal=None, mixedclass_=None,
                 content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.docSect1Type.__init__(self, mixedclass_, content_)
supermod.docSect1Type.subclass = docSect1TypeSub
# end class docSect1TypeSub
|
||||
|
||||
|
||||
class docSect2TypeSub(supermod.docSect2Type):
    """Generated-schema subclass; node_type tags the node for finder dispatch."""

    node_type = "docsect2"

    def __init__(self, id=None, title='', para=None, sect3=None, internal=None, mixedclass_=None,
                 content_=None):
        # Only the mixed-content arguments are forwarded to the superclass.
        supermod.docSect2Type.__init__(self, mixedclass_, content_)
supermod.docSect2Type.subclass = docSect2TypeSub
# end class docSect2TypeSub
|
||||
|
||||
|
||||
class docSect3TypeSub(supermod.docSect3Type):
|
||||
|
||||
node_type = "docsect3"
|
||||
|
||||
def __init__(self, id=None, title='', para=None, sect4=None, internal=None, mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docSect3Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docSect3Type.subclass = docSect3TypeSub
|
||||
# end class docSect3TypeSub
|
||||
|
||||
|
||||
class docSect4TypeSub(supermod.docSect4Type):
|
||||
|
||||
node_type = "docsect4"
|
||||
|
||||
def __init__(self, id=None, title='', para=None, internal=None, mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docSect4Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docSect4Type.subclass = docSect4TypeSub
|
||||
# end class docSect4TypeSub
|
||||
|
||||
|
||||
class docInternalTypeSub(supermod.docInternalType):
|
||||
|
||||
node_type = "docinternal"
|
||||
|
||||
def __init__(self, para=None, sect1=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalType.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalType.subclass = docInternalTypeSub
|
||||
# end class docInternalTypeSub
|
||||
|
||||
|
||||
class docInternalS1TypeSub(supermod.docInternalS1Type):
|
||||
|
||||
node_type = "docinternals1"
|
||||
|
||||
def __init__(self, para=None, sect2=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS1Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS1Type.subclass = docInternalS1TypeSub
|
||||
# end class docInternalS1TypeSub
|
||||
|
||||
|
||||
class docInternalS2TypeSub(supermod.docInternalS2Type):
|
||||
|
||||
node_type = "docinternals2"
|
||||
|
||||
def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS2Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS2Type.subclass = docInternalS2TypeSub
|
||||
# end class docInternalS2TypeSub
|
||||
|
||||
|
||||
class docInternalS3TypeSub(supermod.docInternalS3Type):
|
||||
|
||||
node_type = "docinternals3"
|
||||
|
||||
def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS3Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS3Type.subclass = docInternalS3TypeSub
|
||||
# end class docInternalS3TypeSub
|
||||
|
||||
|
||||
class docInternalS4TypeSub(supermod.docInternalS4Type):
|
||||
|
||||
node_type = "docinternals4"
|
||||
|
||||
def __init__(self, para=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS4Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS4Type.subclass = docInternalS4TypeSub
|
||||
# end class docInternalS4TypeSub
|
||||
|
||||
|
||||
class docURLLinkSub(supermod.docURLLink):
|
||||
|
||||
node_type = "docurllink"
|
||||
|
||||
def __init__(self, url=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docURLLink.__init__(self, mixedclass_, content_)
|
||||
supermod.docURLLink.subclass = docURLLinkSub
|
||||
# end class docURLLinkSub
|
||||
|
||||
|
||||
class docAnchorTypeSub(supermod.docAnchorType):
|
||||
|
||||
node_type = "docanchor"
|
||||
|
||||
def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docAnchorType.__init__(self, mixedclass_, content_)
|
||||
supermod.docAnchorType.subclass = docAnchorTypeSub
|
||||
# end class docAnchorTypeSub
|
||||
|
||||
|
||||
class docFormulaTypeSub(supermod.docFormulaType):
|
||||
|
||||
node_type = "docformula"
|
||||
|
||||
def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docFormulaType.__init__(self, mixedclass_, content_)
|
||||
supermod.docFormulaType.subclass = docFormulaTypeSub
|
||||
# end class docFormulaTypeSub
|
||||
|
||||
|
||||
class docIndexEntryTypeSub(supermod.docIndexEntryType):
|
||||
|
||||
node_type = "docindexentry"
|
||||
|
||||
def __init__(self, primaryie='', secondaryie=''):
|
||||
supermod.docIndexEntryType.__init__(self, primaryie, secondaryie)
|
||||
supermod.docIndexEntryType.subclass = docIndexEntryTypeSub
|
||||
# end class docIndexEntryTypeSub
|
||||
|
||||
|
||||
class docListTypeSub(supermod.docListType):
|
||||
|
||||
node_type = "doclist"
|
||||
|
||||
def __init__(self, listitem=None, subtype=""):
|
||||
self.node_subtype = "itemized"
|
||||
if subtype is not "":
|
||||
self.node_subtype = subtype
|
||||
supermod.docListType.__init__(self, listitem)
|
||||
supermod.docListType.subclass = docListTypeSub
|
||||
# end class docListTypeSub
|
||||
|
||||
|
||||
class docListItemTypeSub(supermod.docListItemType):
|
||||
|
||||
node_type = "doclistitem"
|
||||
|
||||
def __init__(self, para=None):
|
||||
supermod.docListItemType.__init__(self, para)
|
||||
supermod.docListItemType.subclass = docListItemTypeSub
|
||||
# end class docListItemTypeSub
|
||||
|
||||
|
||||
class docSimpleSectTypeSub(supermod.docSimpleSectType):
|
||||
|
||||
node_type = "docsimplesect"
|
||||
|
||||
def __init__(self, kind=None, title=None, para=None):
|
||||
supermod.docSimpleSectType.__init__(self, kind, title, para)
|
||||
supermod.docSimpleSectType.subclass = docSimpleSectTypeSub
|
||||
# end class docSimpleSectTypeSub
|
||||
|
||||
|
||||
class docVarListEntryTypeSub(supermod.docVarListEntryType):
|
||||
|
||||
node_type = "docvarlistentry"
|
||||
|
||||
def __init__(self, term=None):
|
||||
supermod.docVarListEntryType.__init__(self, term)
|
||||
supermod.docVarListEntryType.subclass = docVarListEntryTypeSub
|
||||
# end class docVarListEntryTypeSub
|
||||
|
||||
|
||||
class docRefTextTypeSub(supermod.docRefTextType):
|
||||
|
||||
node_type = "docreftext"
|
||||
|
||||
def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docRefTextType.__init__(self, mixedclass_, content_)
|
||||
|
||||
self.para = []
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
supermod.docRefTextType.buildChildren(self, child_, nodeName_)
|
||||
|
||||
if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'para':
|
||||
obj_ = supermod.docParaType.factory()
|
||||
obj_.build(child_)
|
||||
self.para.append(obj_)
|
||||
|
||||
supermod.docRefTextType.subclass = docRefTextTypeSub
|
||||
# end class docRefTextTypeSub
|
||||
|
||||
|
||||
class docTableTypeSub(supermod.docTableType):
|
||||
|
||||
node_type = "doctable"
|
||||
|
||||
def __init__(self, rows=None, cols=None, row=None, caption=None):
|
||||
supermod.docTableType.__init__(self, rows, cols, row, caption)
|
||||
supermod.docTableType.subclass = docTableTypeSub
|
||||
# end class docTableTypeSub
|
||||
|
||||
|
||||
class docRowTypeSub(supermod.docRowType):
|
||||
|
||||
node_type = "docrow"
|
||||
|
||||
def __init__(self, entry=None):
|
||||
supermod.docRowType.__init__(self, entry)
|
||||
supermod.docRowType.subclass = docRowTypeSub
|
||||
# end class docRowTypeSub
|
||||
|
||||
|
||||
class docEntryTypeSub(supermod.docEntryType):
|
||||
|
||||
node_type = "docentry"
|
||||
|
||||
def __init__(self, thead=None, para=None):
|
||||
supermod.docEntryType.__init__(self, thead, para)
|
||||
supermod.docEntryType.subclass = docEntryTypeSub
|
||||
# end class docEntryTypeSub
|
||||
|
||||
|
||||
class docHeadingTypeSub(supermod.docHeadingType):
|
||||
|
||||
node_type = "docheading"
|
||||
|
||||
def __init__(self, level=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docHeadingType.__init__(self, mixedclass_, content_)
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
supermod.docHeadingType.buildChildren(self, child_, nodeName_)
|
||||
|
||||
# Account for styled content in the heading. This might need to be expanded to include other
|
||||
# nodes as it seems from the xsd that headings can have a lot of different children but we
|
||||
# really don't expect most of them to come up.
|
||||
if child_.nodeType == Node.ELEMENT_NODE and (
|
||||
nodeName_ == 'bold' or
|
||||
nodeName_ == 'emphasis' or
|
||||
nodeName_ == 'computeroutput' or
|
||||
nodeName_ == 'subscript' or
|
||||
nodeName_ == 'superscript' or
|
||||
nodeName_ == 'center' or
|
||||
nodeName_ == 'small'):
|
||||
obj_ = supermod.docMarkupType.factory()
|
||||
obj_.build(child_)
|
||||
obj_.type_ = nodeName_
|
||||
self.content_.append(obj_)
|
||||
|
||||
supermod.docHeadingType.subclass = docHeadingTypeSub
|
||||
# end class docHeadingTypeSub
|
||||
|
||||
|
||||
class docImageTypeSub(supermod.docImageType):
|
||||
|
||||
node_type = "docimage"
|
||||
|
||||
def __init__(self, width=None, type_=None, name=None, height=None, valueOf_='',
|
||||
mixedclass_=None, content_=None):
|
||||
supermod.docImageType.__init__(self, mixedclass_, content_)
|
||||
supermod.docImageType.subclass = docImageTypeSub
|
||||
# end class docImageTypeSub
|
||||
|
||||
|
||||
class docDotFileTypeSub(supermod.docDotFileType):
|
||||
|
||||
node_type = "docdocfile"
|
||||
|
||||
def __init__(self, name=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docDotFileType.__init__(self, mixedclass_, content_)
|
||||
supermod.docDotFileType.subclass = docDotFileTypeSub
|
||||
# end class docDotFileTypeSub
|
||||
|
||||
|
||||
class docTocItemTypeSub(supermod.docTocItemType):
|
||||
|
||||
node_type = "doctocitem"
|
||||
|
||||
def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docTocItemType.__init__(self, mixedclass_, content_)
|
||||
supermod.docTocItemType.subclass = docTocItemTypeSub
|
||||
# end class docTocItemTypeSub
|
||||
|
||||
|
||||
class docTocListTypeSub(supermod.docTocListType):
|
||||
|
||||
node_type = "doctoclist"
|
||||
|
||||
def __init__(self, tocitem=None):
|
||||
supermod.docTocListType.__init__(self, tocitem)
|
||||
supermod.docTocListType.subclass = docTocListTypeSub
|
||||
# end class docTocListTypeSub
|
||||
|
||||
|
||||
class docLanguageTypeSub(supermod.docLanguageType):
|
||||
|
||||
node_type = "doclanguage"
|
||||
|
||||
def __init__(self, langid=None, para=None):
|
||||
supermod.docLanguageType.__init__(self, langid, para)
|
||||
supermod.docLanguageType.subclass = docLanguageTypeSub
|
||||
# end class docLanguageTypeSub
|
||||
|
||||
|
||||
class docParamListTypeSub(supermod.docParamListType):
|
||||
|
||||
node_type = "docparamlist"
|
||||
|
||||
def __init__(self, kind=None, parameteritem=None):
|
||||
supermod.docParamListType.__init__(self, kind, parameteritem)
|
||||
supermod.docParamListType.subclass = docParamListTypeSub
|
||||
# end class docParamListTypeSub
|
||||
|
||||
|
||||
class docParamListItemSub(supermod.docParamListItem):
|
||||
|
||||
node_type = "docparamlistitem"
|
||||
|
||||
def __init__(self, parameternamelist=None, parameterdescription=None):
|
||||
supermod.docParamListItem.__init__(self, parameternamelist, parameterdescription)
|
||||
supermod.docParamListItem.subclass = docParamListItemSub
|
||||
# end class docParamListItemSub
|
||||
|
||||
|
||||
class docParamNameListSub(supermod.docParamNameList):
|
||||
|
||||
node_type = "docparamnamelist"
|
||||
|
||||
def __init__(self, parametername=None):
|
||||
supermod.docParamNameList.__init__(self, parametername)
|
||||
supermod.docParamNameList.subclass = docParamNameListSub
|
||||
# end class docParamNameListSub
|
||||
|
||||
|
||||
class docParamNameSub(supermod.docParamName):
|
||||
|
||||
node_type = "docparamname"
|
||||
|
||||
def __init__(self, direction=None, ref=None, mixedclass_=None, content_=None):
|
||||
supermod.docParamName.__init__(self, mixedclass_, content_)
|
||||
supermod.docParamName.subclass = docParamNameSub
|
||||
# end class docParamNameSub
|
||||
|
||||
|
||||
class docXRefSectTypeSub(supermod.docXRefSectType):
|
||||
|
||||
node_type = "docxrefsect"
|
||||
|
||||
def __init__(self, id=None, xreftitle=None, xrefdescription=None):
|
||||
supermod.docXRefSectType.__init__(self, id, xreftitle, xrefdescription)
|
||||
supermod.docXRefSectType.subclass = docXRefSectTypeSub
|
||||
# end class docXRefSectTypeSub
|
||||
|
||||
|
||||
class docCopyTypeSub(supermod.docCopyType):
|
||||
|
||||
node_type = "doccopy"
|
||||
|
||||
def __init__(self, link=None, para=None, sect1=None, internal=None):
|
||||
supermod.docCopyType.__init__(self, link, para, sect1, internal)
|
||||
supermod.docCopyType.subclass = docCopyTypeSub
|
||||
# end class docCopyTypeSub
|
||||
|
||||
|
||||
class docCharTypeSub(supermod.docCharType):
|
||||
|
||||
node_type = "docchar"
|
||||
|
||||
def __init__(self, char=None, valueOf_=''):
|
||||
supermod.docCharType.__init__(self, char)
|
||||
supermod.docCharType.subclass = docCharTypeSub
|
||||
# end class docCharTypeSub
|
||||
|
||||
|
||||
class verbatimTypeSub(object):
|
||||
"""
|
||||
New node type. Structure is largely pillaged from other nodes in order to
|
||||
match the set.
|
||||
"""
|
||||
|
||||
node_type = "verbatim"
|
||||
|
||||
def __init__(self, valueOf_='', mixedclass_=None, content_=None):
|
||||
if mixedclass_ is None:
|
||||
self.mixedclass_ = MixedContainer
|
||||
else:
|
||||
self.mixedclass_ = mixedclass_
|
||||
if content_ is None:
|
||||
self.content_ = []
|
||||
else:
|
||||
self.content_ = content_
|
||||
self.text = ""
|
||||
|
||||
def factory(*args, **kwargs):
|
||||
return verbatimTypeSub(*args, **kwargs)
|
||||
|
||||
factory = staticmethod(factory)
|
||||
|
||||
def buildAttributes(self, attrs):
|
||||
pass
|
||||
|
||||
def build(self, node_):
|
||||
attrs = node_.attributes
|
||||
self.buildAttributes(attrs)
|
||||
self.valueOf_ = ''
|
||||
for child_ in node_.childNodes:
|
||||
nodeName_ = child_.nodeName.split(':')[-1]
|
||||
self.buildChildren(child_, nodeName_)
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.TEXT_NODE:
|
||||
self.text += child_.nodeValue
|
||||
|
||||
|
||||
class docParaTypeSub(supermod.docParaType):
|
||||
|
||||
node_type = "docpara"
|
||||
|
||||
def __init__(self, char=None, valueOf_=''):
|
||||
supermod.docParaType.__init__(self, char)
|
||||
|
||||
self.parameterlist = []
|
||||
self.simplesects = []
|
||||
self.content = []
|
||||
self.programlisting = []
|
||||
self.images = []
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
supermod.docParaType.buildChildren(self, child_, nodeName_)
|
||||
|
||||
if child_.nodeType == Node.TEXT_NODE:
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryText,
|
||||
MixedContainer.TypeNone, '', child_.nodeValue)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "ref":
|
||||
obj_ = supermod.docRefTextType.factory()
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'parameterlist':
|
||||
obj_ = supermod.docParamListType.factory()
|
||||
obj_.build(child_)
|
||||
self.parameterlist.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'simplesect':
|
||||
obj_ = supermod.docSimpleSectType.factory()
|
||||
obj_.build(child_)
|
||||
self.simplesects.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'programlisting':
|
||||
obj_ = supermod.listingType.factory()
|
||||
obj_.build(child_)
|
||||
self.programlisting.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'image':
|
||||
obj_ = supermod.docImageType.factory()
|
||||
obj_.build(child_)
|
||||
self.images.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and (
|
||||
nodeName_ == 'bold' or
|
||||
nodeName_ == 'emphasis' or
|
||||
nodeName_ == 'computeroutput' or
|
||||
nodeName_ == 'subscript' or
|
||||
nodeName_ == 'superscript' or
|
||||
nodeName_ == 'center' or
|
||||
nodeName_ == 'small'):
|
||||
obj_ = supermod.docMarkupType.factory()
|
||||
obj_.build(child_)
|
||||
obj_.type_ = nodeName_
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'verbatim':
|
||||
childobj_ = verbatimTypeSub.factory()
|
||||
childobj_.build(child_)
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone,
|
||||
'verbatim', childobj_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'formula':
|
||||
childobj_ = docFormulaTypeSub.factory()
|
||||
childobj_.build(child_)
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone,
|
||||
'formula', childobj_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "itemizedlist":
|
||||
obj_ = supermod.docListType.factory(subtype="itemized")
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "orderedlist":
|
||||
obj_ = supermod.docListType.factory(subtype="ordered")
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'heading':
|
||||
obj_ = supermod.docHeadingType.factory()
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'ulink':
|
||||
obj_ = supermod.docURLLink.factory()
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
|
||||
supermod.docParaType.subclass = docParaTypeSub
|
||||
# end class docParaTypeSub
|
||||
|
||||
|
||||
class docMarkupTypeSub(supermod.docMarkupType):
|
||||
|
||||
node_type = "docmarkup"
|
||||
|
||||
def __init__(self, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docMarkupType.__init__(self, valueOf_, mixedclass_, content_)
|
||||
self.type_ = None
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.TEXT_NODE:
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryText, MixedContainer.TypeNone, '',
|
||||
child_.nodeValue)
|
||||
self.content_.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'ref':
|
||||
childobj_ = supermod.docRefTextType.factory()
|
||||
childobj_.build(child_)
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone, 'ref',
|
||||
childobj_)
|
||||
self.content_.append(obj_)
|
||||
if child_.nodeType == Node.TEXT_NODE:
|
||||
self.valueOf_ += child_.nodeValue
|
||||
elif child_.nodeType == Node.CDATA_SECTION_NODE:
|
||||
self.valueOf_ += '![CDATA[' + child_.nodeValue + ']]'
|
||||
|
||||
supermod.docMarkupType.subclass = docMarkupTypeSub
|
||||
# end class docMarkupTypeSub
|
||||
|
||||
|
||||
class docTitleTypeSub(supermod.docTitleType):
|
||||
|
||||
node_type = "doctitle"
|
||||
|
||||
def __init__(self, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docTitleType.__init__(self, valueOf_, mixedclass_, content_)
|
||||
self.type_ = None
|
||||
|
||||
supermod.docTitleType.subclass = docTitleTypeSub
|
||||
# end class docTitleTypeSub
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class FileIOError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def parse(inFilename):
|
||||
|
||||
try:
|
||||
doc = minidom.parse(inFilename)
|
||||
except IOError as e:
|
||||
raise FileIOError(e)
|
||||
except ExpatError as e:
|
||||
raise ParseError(e)
|
||||
|
||||
rootNode = doc.documentElement
|
||||
rootObj = supermod.DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
return rootObj
|
||||
|
||||
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -1,63 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Generated Mon Feb 9 19:08:05 2009 by generateDS.py.
|
||||
"""
|
||||
|
||||
from xml.dom import minidom
|
||||
from xml.parsers.expat import ExpatError
|
||||
|
||||
|
||||
from . import indexsuper as supermod
|
||||
|
||||
class DoxygenTypeSub(supermod.DoxygenType):
|
||||
|
||||
node_type = "doxygen"
|
||||
|
||||
def __init__(self, version=None, compound=None):
|
||||
supermod.DoxygenType.__init__(self, version, compound)
|
||||
supermod.DoxygenType.subclass = DoxygenTypeSub
|
||||
# end class DoxygenTypeSub
|
||||
|
||||
|
||||
class CompoundTypeSub(supermod.CompoundType):
|
||||
|
||||
node_type = "compound"
|
||||
|
||||
def __init__(self, kind=None, refid=None, name='', member=None):
|
||||
supermod.CompoundType.__init__(self, kind, refid, name, member)
|
||||
supermod.CompoundType.subclass = CompoundTypeSub
|
||||
# end class CompoundTypeSub
|
||||
|
||||
|
||||
class MemberTypeSub(supermod.MemberType):
|
||||
|
||||
node_type = "member"
|
||||
|
||||
def __init__(self, kind=None, refid=None, name=''):
|
||||
supermod.MemberType.__init__(self, kind, refid, name)
|
||||
supermod.MemberType.subclass = MemberTypeSub
|
||||
# end class MemberTypeSub
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
pass
|
||||
|
||||
class FileIOError(Exception):
|
||||
pass
|
||||
|
||||
def parse(inFilename):
|
||||
|
||||
try:
|
||||
doc = minidom.parse(inFilename)
|
||||
except IOError as e:
|
||||
raise FileIOError(e)
|
||||
except ExpatError as e:
|
||||
raise ParseError(e)
|
||||
|
||||
rootNode = doc.documentElement
|
||||
rootObj = supermod.DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
|
||||
return rootObj
|
||||
|
Binary file not shown.
@ -1,362 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
#
|
||||
# Generated Thu Jun 11 18:43:54 2009 by generateDS.py.
|
||||
#
|
||||
|
||||
import sys
|
||||
import getopt
|
||||
from xml.dom import minidom
|
||||
from xml.dom import Node
|
||||
|
||||
#
|
||||
# User methods
|
||||
#
|
||||
# Calls to the methods in these classes are generated by generateDS.py.
|
||||
# You can replace these methods by re-implementing the following class
|
||||
# in a module named generatedssuper.py.
|
||||
|
||||
try:
|
||||
from generatedssuper import GeneratedsSuper
|
||||
except ImportError as exp:
|
||||
|
||||
class GeneratedsSuper:
|
||||
def format_string(self, input_data, input_name=''):
|
||||
return input_data
|
||||
def format_integer(self, input_data, input_name=''):
|
||||
return '%d' % input_data
|
||||
def format_float(self, input_data, input_name=''):
|
||||
return '%f' % input_data
|
||||
def format_double(self, input_data, input_name=''):
|
||||
return '%e' % input_data
|
||||
def format_boolean(self, input_data, input_name=''):
|
||||
return '%s' % input_data
|
||||
|
||||
|
||||
#
|
||||
# If you have installed IPython you can uncomment and use the following.
|
||||
# IPython is available from http://ipython.scipy.org/.
|
||||
#
|
||||
|
||||
## from IPython.Shell import IPShellEmbed
|
||||
## args = ''
|
||||
## ipshell = IPShellEmbed(args,
|
||||
## banner = 'Dropping into IPython',
|
||||
## exit_msg = 'Leaving Interpreter, back to program.')
|
||||
|
||||
# Then use the following line where and when you want to drop into the
|
||||
# IPython shell:
|
||||
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
|
||||
|
||||
#
|
||||
# Globals
|
||||
#
|
||||
|
||||
ExternalEncoding = 'ascii'
|
||||
|
||||
#
|
||||
# Support/utility functions.
|
||||
#
|
||||
|
||||
def showIndent(outfile, level):
|
||||
for idx in range(level):
|
||||
outfile.write(' ')
|
||||
|
||||
def quote_xml(inStr):
|
||||
s1 = (isinstance(inStr, basestring) and inStr or
|
||||
'%s' % inStr)
|
||||
s1 = s1.replace('&', '&')
|
||||
s1 = s1.replace('<', '<')
|
||||
s1 = s1.replace('>', '>')
|
||||
return s1
|
||||
|
||||
def quote_attrib(inStr):
|
||||
s1 = (isinstance(inStr, basestring) and inStr or
|
||||
'%s' % inStr)
|
||||
s1 = s1.replace('&', '&')
|
||||
s1 = s1.replace('<', '<')
|
||||
s1 = s1.replace('>', '>')
|
||||
if '"' in s1:
|
||||
if "'" in s1:
|
||||
s1 = '"%s"' % s1.replace('"', """)
|
||||
else:
|
||||
s1 = "'%s'" % s1
|
||||
else:
|
||||
s1 = '"%s"' % s1
|
||||
return s1
|
||||
|
||||
def quote_python(inStr):
|
||||
s1 = inStr
|
||||
if s1.find("'") == -1:
|
||||
if s1.find('\n') == -1:
|
||||
return "'%s'" % s1
|
||||
else:
|
||||
return "'''%s'''" % s1
|
||||
else:
|
||||
if s1.find('"') != -1:
|
||||
s1 = s1.replace('"', '\\"')
|
||||
if s1.find('\n') == -1:
|
||||
return '"%s"' % s1
|
||||
else:
|
||||
return '"""%s"""' % s1
|
||||
|
||||
|
||||
class MixedContainer:
|
||||
# Constants for category:
|
||||
CategoryNone = 0
|
||||
CategoryText = 1
|
||||
CategorySimple = 2
|
||||
CategoryComplex = 3
|
||||
# Constants for content_type:
|
||||
TypeNone = 0
|
||||
TypeText = 1
|
||||
TypeString = 2
|
||||
TypeInteger = 3
|
||||
TypeFloat = 4
|
||||
TypeDecimal = 5
|
||||
TypeDouble = 6
|
||||
TypeBoolean = 7
|
||||
def __init__(self, category, content_type, name, value):
|
||||
self.category = category
|
||||
self.content_type = content_type
|
||||
self.name = name
|
||||
self.value = value
|
||||
def getCategory(self):
|
||||
return self.category
|
||||
def getContenttype(self, content_type):
|
||||
return self.content_type
|
||||
def getValue(self):
|
||||
return self.value
|
||||
def getName(self):
|
||||
return self.name
|
||||
|
||||
|
||||
class _MemberSpec(object):
|
||||
def __init__(self, name='', data_type='', container=0):
|
||||
self.name = name
|
||||
self.data_type = data_type
|
||||
self.container = container
|
||||
def set_name(self, name): self.name = name
|
||||
def get_name(self): return self.name
|
||||
def set_data_type(self, data_type): self.data_type = data_type
|
||||
def get_data_type(self): return self.data_type
|
||||
def set_container(self, container): self.container = container
|
||||
def get_container(self): return self.container
|
||||
|
||||
|
||||
#
|
||||
# Data representation classes.
|
||||
#
|
||||
|
||||
class DoxygenType(GeneratedsSuper):
|
||||
subclass = None
|
||||
superclass = None
|
||||
def __init__(self, version=None, compound=None):
|
||||
self.version = version
|
||||
if compound is None:
|
||||
self.compound = []
|
||||
else:
|
||||
self.compound = compound
|
||||
def factory(*args_, **kwargs_):
|
||||
if DoxygenType.subclass:
|
||||
return DoxygenType.subclass(*args_, **kwargs_)
|
||||
else:
|
||||
return DoxygenType(*args_, **kwargs_)
|
||||
factory = staticmethod(factory)
|
||||
def get_compound(self): return self.compound
|
||||
def set_compound(self, compound): self.compound = compound
|
||||
def add_compound(self, value): self.compound.append(value)
|
||||
def insert_compound(self, index, value): self.compound[index] = value
|
||||
def get_version(self): return self.version
|
||||
def set_version(self, version): self.version = version
|
||||
def hasContent_(self):
|
||||
if (
|
||||
self.compound is not None
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
def build(self, node_):
|
||||
attrs = node_.attributes
|
||||
self.buildAttributes(attrs)
|
||||
for child_ in node_.childNodes:
|
||||
nodeName_ = child_.nodeName.split(':')[-1]
|
||||
self.buildChildren(child_, nodeName_)
|
||||
def buildAttributes(self, attrs):
|
||||
if attrs.get('version'):
|
||||
self.version = attrs.get('version').value
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'compound':
|
||||
obj_ = CompoundType.factory()
|
||||
obj_.build(child_)
|
||||
self.compound.append(obj_)
|
||||
# end class DoxygenType
|
||||
|
||||
|
||||
class CompoundType(GeneratedsSuper):
|
||||
subclass = None
|
||||
superclass = None
|
||||
def __init__(self, kind=None, refid=None, name=None, member=None):
|
||||
self.kind = kind
|
||||
self.refid = refid
|
||||
self.name = name
|
||||
if member is None:
|
||||
self.member = []
|
||||
else:
|
||||
self.member = member
|
||||
def factory(*args_, **kwargs_):
|
||||
if CompoundType.subclass:
|
||||
return CompoundType.subclass(*args_, **kwargs_)
|
||||
else:
|
||||
return CompoundType(*args_, **kwargs_)
|
||||
factory = staticmethod(factory)
|
||||
def get_name(self): return self.name
|
||||
def set_name(self, name): self.name = name
|
||||
def get_member(self): return self.member
|
||||
def set_member(self, member): self.member = member
|
||||
def add_member(self, value): self.member.append(value)
|
||||
def insert_member(self, index, value): self.member[index] = value
|
||||
def get_kind(self): return self.kind
|
||||
def set_kind(self, kind): self.kind = kind
|
||||
def get_refid(self): return self.refid
|
||||
def set_refid(self, refid): self.refid = refid
|
||||
def build(self, node_):
|
||||
attrs = node_.attributes
|
||||
self.buildAttributes(attrs)
|
||||
for child_ in node_.childNodes:
|
||||
nodeName_ = child_.nodeName.split(':')[-1]
|
||||
self.buildChildren(child_, nodeName_)
|
||||
def buildAttributes(self, attrs):
|
||||
if attrs.get('kind'):
|
||||
self.kind = attrs.get('kind').value
|
||||
if attrs.get('refid'):
|
||||
self.refid = attrs.get('refid').value
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'name':
|
||||
name_ = ''
|
||||
for text__content_ in child_.childNodes:
|
||||
name_ += text__content_.nodeValue
|
||||
self.name = name_
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'member':
|
||||
obj_ = MemberType.factory()
|
||||
obj_.build(child_)
|
||||
self.member.append(obj_)
|
||||
# end class CompoundType
|
||||
|
||||
|
||||
class MemberType(GeneratedsSuper):
|
||||
subclass = None
|
||||
superclass = None
|
||||
def __init__(self, kind=None, refid=None, name=None):
|
||||
self.kind = kind
|
||||
self.refid = refid
|
||||
self.name = name
|
||||
def factory(*args_, **kwargs_):
|
||||
if MemberType.subclass:
|
||||
return MemberType.subclass(*args_, **kwargs_)
|
||||
else:
|
||||
return MemberType(*args_, **kwargs_)
|
||||
factory = staticmethod(factory)
|
||||
def get_name(self): return self.name
|
||||
def set_name(self, name): self.name = name
|
||||
def get_kind(self): return self.kind
|
||||
def set_kind(self, kind): self.kind = kind
|
||||
def get_refid(self): return self.refid
|
||||
def set_refid(self, refid): self.refid = refid
|
||||
def hasContent_(self):
|
||||
if (
|
||||
self.name is not None
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
def build(self, node_):
|
||||
attrs = node_.attributes
|
||||
self.buildAttributes(attrs)
|
||||
for child_ in node_.childNodes:
|
||||
nodeName_ = child_.nodeName.split(':')[-1]
|
||||
self.buildChildren(child_, nodeName_)
|
||||
def buildAttributes(self, attrs):
|
||||
if attrs.get('kind'):
|
||||
self.kind = attrs.get('kind').value
|
||||
if attrs.get('refid'):
|
||||
self.refid = attrs.get('refid').value
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'name':
|
||||
name_ = ''
|
||||
for text__content_ in child_.childNodes:
|
||||
name_ += text__content_.nodeValue
|
||||
self.name = name_
|
||||
# end class MemberType
|
||||
|
||||
|
||||
USAGE_TEXT = """
|
||||
Usage: python <Parser>.py [ -s ] <in_xml_file>
|
||||
Options:
|
||||
-s Use the SAX parser, not the minidom parser.
|
||||
"""
|
||||
|
||||
def usage():
|
||||
print(USAGE_TEXT)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def parse(inFileName):
|
||||
doc = minidom.parse(inFileName)
|
||||
rootNode = doc.documentElement
|
||||
rootObj = DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
# Enable Python to collect the space used by the DOM.
|
||||
doc = None
|
||||
sys.stdout.write('<?xml version="1.0" ?>\n')
|
||||
rootObj.export(sys.stdout, 0, name_="doxygenindex",
|
||||
namespacedef_='')
|
||||
return rootObj
|
||||
|
||||
|
||||
def parseString(inString):
|
||||
doc = minidom.parseString(inString)
|
||||
rootNode = doc.documentElement
|
||||
rootObj = DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
# Enable Python to collect the space used by the DOM.
|
||||
doc = None
|
||||
sys.stdout.write('<?xml version="1.0" ?>\n')
|
||||
rootObj.export(sys.stdout, 0, name_="doxygenindex",
|
||||
namespacedef_='')
|
||||
return rootObj
|
||||
|
||||
|
||||
def parseLiteral(inFileName):
|
||||
doc = minidom.parse(inFileName)
|
||||
rootNode = doc.documentElement
|
||||
rootObj = DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
# Enable Python to collect the space used by the DOM.
|
||||
doc = None
|
||||
sys.stdout.write('from index import *\n\n')
|
||||
sys.stdout.write('rootObj = doxygenindex(\n')
|
||||
rootObj.exportLiteral(sys.stdout, 0, name_="doxygenindex")
|
||||
sys.stdout.write(')\n')
|
||||
return rootObj
|
||||
|
||||
|
||||
def main():
|
||||
args = sys.argv[1:]
|
||||
if len(args) == 1:
|
||||
parse(args[0])
|
||||
else:
|
||||
usage()
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
#import pdb
|
||||
#pdb.run('main()')
|
||||
|
Binary file not shown.
@ -1,87 +0,0 @@
|
||||
|
||||
AUTOCFG_TEMPLATE = r"""
|
||||
PROJECT_NAME = "{project_name}"
|
||||
OUTPUT_DIRECTORY = {output_dir}
|
||||
GENERATE_LATEX = NO
|
||||
GENERATE_MAN = NO
|
||||
GENERATE_RTF = NO
|
||||
CASE_SENSE_NAMES = NO
|
||||
INPUT = {input}
|
||||
ENABLE_PREPROCESSING = YES
|
||||
QUIET = YES
|
||||
JAVADOC_AUTOBRIEF = YES
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
GENERATE_HTML = NO
|
||||
GENERATE_XML = YES
|
||||
ALIASES = "rst=\verbatim embed:rst"
|
||||
ALIASES += "endrst=\endverbatim"
|
||||
""".strip()
|
||||
|
||||
|
||||
class ProjectData(object):
|
||||
"Simple handler for the files and project_info for each project"
|
||||
|
||||
def __init__(self, auto_project_info, files):
|
||||
|
||||
self.auto_project_info = auto_project_info
|
||||
self.files = files
|
||||
|
||||
|
||||
class AutoDoxygenProcessHandle(object):
|
||||
|
||||
def __init__(self, path_handler, run_process, write_file, project_info_factory):
|
||||
|
||||
self.path_handler = path_handler
|
||||
self.run_process = run_process
|
||||
self.write_file = write_file
|
||||
self.project_info_factory = project_info_factory
|
||||
|
||||
def generate_xml(self, app):
|
||||
|
||||
project_files = {}
|
||||
|
||||
# First collect together all the files which need to be doxygen processed for each project
|
||||
for project_name, file_structure in app.config.breathe_projects_source.items():
|
||||
|
||||
folder = file_structure[0]
|
||||
contents = file_structure[1]
|
||||
|
||||
auto_project_info = self.project_info_factory.create_auto_project_info(
|
||||
project_name, folder)
|
||||
|
||||
project_files[project_name] = ProjectData(auto_project_info, contents)
|
||||
|
||||
# Iterate over the projects and generate doxygen xml output for the files for each one into
|
||||
# a directory in the Sphinx build area
|
||||
for project_name, data in project_files.items():
|
||||
|
||||
project_path = self.process(data.auto_project_info, data.files)
|
||||
|
||||
project_info = data.auto_project_info.create_project_info(project_path)
|
||||
|
||||
self.project_info_factory.store_project_info_for_auto(project_name, project_info)
|
||||
|
||||
def process(self, auto_project_info, files):
|
||||
|
||||
name = auto_project_info.name()
|
||||
cfgfile = "%s.cfg" % name
|
||||
|
||||
full_paths = map(lambda x: auto_project_info.abs_path_to_source_file(x), files)
|
||||
|
||||
cfg = AUTOCFG_TEMPLATE.format(
|
||||
project_name=name,
|
||||
output_dir=name,
|
||||
input=" ".join(full_paths)
|
||||
)
|
||||
|
||||
build_dir = self.path_handler.join(
|
||||
auto_project_info.build_dir(),
|
||||
"breathe",
|
||||
"doxygen"
|
||||
)
|
||||
|
||||
self.write_file(build_dir, cfgfile, cfg)
|
||||
|
||||
self.run_process(['doxygen', cfgfile], cwd=build_dir)
|
||||
|
||||
return self.path_handler.join(build_dir, name, "xml")
|
Binary file not shown.
@ -1,306 +0,0 @@
|
||||
|
||||
from .exception import BreatheError
|
||||
|
||||
import os
|
||||
|
||||
|
||||
class ProjectError(BreatheError):
|
||||
pass
|
||||
|
||||
|
||||
class NoDefaultProjectError(ProjectError):
|
||||
pass
|
||||
|
||||
|
||||
class AutoProjectInfo(object):
|
||||
"""Created as a temporary step in the automatic xml generation process"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
source_path,
|
||||
build_dir,
|
||||
reference,
|
||||
source_dir,
|
||||
config_dir,
|
||||
domain_by_extension,
|
||||
domain_by_file_pattern,
|
||||
match
|
||||
):
|
||||
|
||||
self._name = name
|
||||
self._source_path = source_path
|
||||
self._build_dir = build_dir
|
||||
self._reference = reference
|
||||
self._source_dir = source_dir
|
||||
self._config_dir = config_dir
|
||||
self._domain_by_extension = domain_by_extension
|
||||
self._domain_by_file_pattern = domain_by_file_pattern
|
||||
self._match = match
|
||||
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
def build_dir(self):
|
||||
return self._build_dir
|
||||
|
||||
def abs_path_to_source_file(self, file_):
|
||||
"""
|
||||
Returns full path to the provide file assuming that the provided path is relative to the
|
||||
projects conf.py directory as specified in the breathe_projects_source config variable.
|
||||
"""
|
||||
|
||||
# os.path.join does the appropriate handling if _source_path is an absolute path
|
||||
return os.path.join(self._config_dir, self._source_path, file_)
|
||||
|
||||
def create_project_info(self, project_path):
|
||||
"""Creates a proper ProjectInfo object based on the information in this AutoProjectInfo"""
|
||||
|
||||
return ProjectInfo(
|
||||
self._name,
|
||||
project_path,
|
||||
self._source_path,
|
||||
self._reference,
|
||||
self._source_dir,
|
||||
self._config_dir,
|
||||
self._domain_by_extension,
|
||||
self._domain_by_file_pattern,
|
||||
self._match
|
||||
)
|
||||
|
||||
|
||||
class ProjectInfo(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
path,
|
||||
source_path,
|
||||
reference,
|
||||
source_dir,
|
||||
config_dir,
|
||||
domain_by_extension,
|
||||
domain_by_file_pattern,
|
||||
match
|
||||
):
|
||||
|
||||
self._name = name
|
||||
self._project_path = path
|
||||
self._source_path = source_path
|
||||
self._reference = reference
|
||||
self._source_dir = source_dir
|
||||
self._config_dir = config_dir
|
||||
self._domain_by_extension = domain_by_extension
|
||||
self._domain_by_file_pattern = domain_by_file_pattern
|
||||
self._match = match
|
||||
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
def project_path(self):
|
||||
return self._project_path
|
||||
|
||||
def source_path(self):
|
||||
return self._source_path
|
||||
|
||||
def relative_path_to_xml_file(self, file_):
|
||||
"""
|
||||
Returns relative path from Sphinx documentation top-level source directory to the specified
|
||||
file assuming that the specified file is a path relative to the doxygen xml output
|
||||
directory.
|
||||
"""
|
||||
|
||||
# os.path.join does the appropriate handling if _project_path is an absolute path
|
||||
full_xml_project_path = os.path.join(self._config_dir, self._project_path, file_)
|
||||
|
||||
return os.path.relpath(
|
||||
full_xml_project_path,
|
||||
self._source_dir
|
||||
)
|
||||
|
||||
def sphinx_abs_path_to_file(self, file_):
|
||||
"""
|
||||
Prepends os.path.sep to the value returned by relative_path_to_file.
|
||||
|
||||
This is to match Sphinx's concept of an absolute path which starts from the top-level source
|
||||
directory of the project.
|
||||
"""
|
||||
return os.path.sep + self.relative_path_to_xml_file(file_)
|
||||
|
||||
def reference(self):
|
||||
return self._reference
|
||||
|
||||
def domain_for_file(self, file_):
|
||||
|
||||
domain = ""
|
||||
extension = file_.split(".")[-1]
|
||||
|
||||
try:
|
||||
domain = self._domain_by_extension[extension]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
for pattern, pattern_domain in self._domain_by_file_pattern.items():
|
||||
if self._match(file_, pattern):
|
||||
domain = pattern_domain
|
||||
|
||||
return domain
|
||||
|
||||
|
||||
class ProjectInfoFactory(object):
|
||||
|
||||
def __init__(self, source_dir, build_dir, config_dir, match):
|
||||
|
||||
self.source_dir = source_dir
|
||||
self.build_dir = build_dir
|
||||
self.config_dir = config_dir
|
||||
self.match = match
|
||||
|
||||
self.projects = {}
|
||||
self.default_project = None
|
||||
self.domain_by_extension = {}
|
||||
self.domain_by_file_pattern = {}
|
||||
|
||||
self.project_count = 0
|
||||
self.project_info_store = {}
|
||||
self.project_info_for_auto_store = {}
|
||||
self.auto_project_info_store = {}
|
||||
|
||||
def update(
|
||||
self,
|
||||
projects,
|
||||
default_project,
|
||||
domain_by_extension,
|
||||
domain_by_file_pattern,
|
||||
projects_source,
|
||||
build_dir
|
||||
):
|
||||
|
||||
self.projects = projects
|
||||
self.default_project = default_project
|
||||
self.domain_by_extension = domain_by_extension
|
||||
self.domain_by_file_pattern = domain_by_file_pattern
|
||||
self.projects_source = projects_source
|
||||
|
||||
# If the breathe config values has a non-empty value for build_dir then use that otherwise
|
||||
# stick with the default
|
||||
if build_dir:
|
||||
self.build_dir = build_dir
|
||||
|
||||
def default_path(self):
|
||||
|
||||
if not self.default_project:
|
||||
raise NoDefaultProjectError(
|
||||
"No breathe_default_project config setting to fall back on "
|
||||
"for directive with no 'project' or 'path' specified."
|
||||
)
|
||||
|
||||
try:
|
||||
return self.projects[self.default_project]
|
||||
except KeyError:
|
||||
raise ProjectError(
|
||||
("breathe_default_project value '%s' does not seem to be a valid key for the "
|
||||
"breathe_projects dictionary") % self.default_project
|
||||
)
|
||||
|
||||
def create_project_info(self, options):
|
||||
|
||||
name = ""
|
||||
|
||||
if "project" in options:
|
||||
try:
|
||||
path = self.projects[options["project"]]
|
||||
name = options["project"]
|
||||
except KeyError:
|
||||
raise ProjectError("Unable to find project '%s' in breathe_projects dictionary"
|
||||
% options["project"])
|
||||
|
||||
elif "path" in options:
|
||||
path = options["path"]
|
||||
|
||||
else:
|
||||
path = self.default_path()
|
||||
|
||||
try:
|
||||
return self.project_info_store[path]
|
||||
except KeyError:
|
||||
|
||||
reference = name
|
||||
|
||||
if not name:
|
||||
name = "project%s" % self.project_count
|
||||
reference = path
|
||||
self.project_count += 1
|
||||
|
||||
project_info = ProjectInfo(
|
||||
name,
|
||||
path,
|
||||
"NoSourcePath",
|
||||
reference,
|
||||
self.source_dir,
|
||||
self.config_dir,
|
||||
self.domain_by_extension,
|
||||
self.domain_by_file_pattern,
|
||||
self.match
|
||||
)
|
||||
|
||||
self.project_info_store[path] = project_info
|
||||
|
||||
return project_info
|
||||
|
||||
def store_project_info_for_auto(self, name, project_info):
|
||||
"""Stores the project info by name for later extraction by the auto directives.
|
||||
|
||||
Stored separately to the non-auto project info objects as they should never overlap.
|
||||
"""
|
||||
|
||||
self.project_info_for_auto_store[name] = project_info
|
||||
|
||||
def retrieve_project_info_for_auto(self, options):
|
||||
"""Retrieves the project info by name for later extraction by the auto directives.
|
||||
|
||||
Looks for the 'project' entry in the options dictionary. This is a less than ideal API but
|
||||
it is designed to match the use of 'create_project_info' above for which it makes much more
|
||||
sense.
|
||||
"""
|
||||
|
||||
name = options.get('project', self.default_project)
|
||||
|
||||
if name is None:
|
||||
raise NoDefaultProjectError(
|
||||
"No breathe_default_project config setting to fall back on "
|
||||
"for directive with no 'project' or 'path' specified."
|
||||
)
|
||||
|
||||
return self.project_info_for_auto_store[name]
|
||||
|
||||
def create_auto_project_info(self, name, source_path):
|
||||
|
||||
key = source_path
|
||||
|
||||
try:
|
||||
return self.auto_project_info_store[key]
|
||||
except KeyError:
|
||||
|
||||
reference = name
|
||||
|
||||
if not name:
|
||||
name = "project%s" % self.project_count
|
||||
reference = source_path
|
||||
self.project_count += 1
|
||||
|
||||
auto_project_info = AutoProjectInfo(
|
||||
name,
|
||||
source_path,
|
||||
self.build_dir,
|
||||
reference,
|
||||
self.source_dir,
|
||||
self.config_dir,
|
||||
self.domain_by_extension,
|
||||
self.domain_by_file_pattern,
|
||||
self.match
|
||||
)
|
||||
|
||||
self.auto_project_info_store[key] = auto_project_info
|
||||
|
||||
return auto_project_info
|
Binary file not shown.
Binary file not shown.
@ -1,2 +0,0 @@
|
||||
|
||||
|
Binary file not shown.
@ -1,383 +0,0 @@
|
||||
|
||||
from .base import Renderer, RenderContext
|
||||
from . import index as indexrenderer
|
||||
from . import compound as compoundrenderer
|
||||
|
||||
from docutils import nodes
|
||||
import textwrap
|
||||
|
||||
class RstContentCreator(object):
|
||||
|
||||
def __init__(self, list_type, dedent):
|
||||
|
||||
self.list_type = list_type
|
||||
self.dedent = dedent
|
||||
|
||||
def __call__(self, text):
|
||||
|
||||
# Remove the first line which is "embed:rst[:leading-asterisk]"
|
||||
text = "\n".join(text.split(u"\n")[1:])
|
||||
|
||||
# Remove starting whitespace
|
||||
text = self.dedent(text)
|
||||
|
||||
# Inspired by autodoc.py in Sphinx
|
||||
result = self.list_type()
|
||||
for line in text.split("\n"):
|
||||
result.append(line, "<breathe>")
|
||||
|
||||
return result
|
||||
|
||||
class UnicodeRenderer(Renderer):
|
||||
|
||||
def render(self):
|
||||
|
||||
# Skip any nodes that are pure whitespace
|
||||
# Probably need a better way to do this as currently we're only doing
|
||||
# it skip whitespace between higher-level nodes, but this will also
|
||||
# skip any pure whitespace entries in actual content nodes
|
||||
#
|
||||
# We counter that second issue slightly by allowing through single white spaces
|
||||
#
|
||||
if self.data_object.strip():
|
||||
return [self.node_factory.Text(self.data_object)]
|
||||
elif self.data_object == unicode(" "):
|
||||
return [self.node_factory.Text(self.data_object)]
|
||||
else:
|
||||
return []
|
||||
|
||||
class NullRenderer(Renderer):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def render(self):
|
||||
return []
|
||||
|
||||
|
||||
class DoxygenToRstRendererFactory(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
node_type,
|
||||
renderers,
|
||||
renderer_factory_creator,
|
||||
node_factory,
|
||||
project_info,
|
||||
state,
|
||||
document,
|
||||
rst_content_creator,
|
||||
filter_,
|
||||
target_handler,
|
||||
domain_directive_factory
|
||||
):
|
||||
|
||||
self.node_type = node_type
|
||||
self.node_factory = node_factory
|
||||
self.project_info = project_info
|
||||
self.renderers = renderers
|
||||
self.renderer_factory_creator = renderer_factory_creator
|
||||
self.state = state
|
||||
self.document = document
|
||||
self.rst_content_creator = rst_content_creator
|
||||
self.filter_ = filter_
|
||||
self.target_handler = target_handler
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
|
||||
def create_renderer(
|
||||
self,
|
||||
context
|
||||
):
|
||||
|
||||
parent_data_object = context.node_stack[1]
|
||||
data_object = context.node_stack[0]
|
||||
|
||||
if not self.filter_.allow(context.node_stack):
|
||||
return NullRenderer()
|
||||
|
||||
child_renderer_factory = self.renderer_factory_creator.create_child_factory(
|
||||
self.project_info,
|
||||
data_object,
|
||||
self
|
||||
)
|
||||
|
||||
try:
|
||||
node_type = data_object.node_type
|
||||
except AttributeError as e:
|
||||
|
||||
# Horrible hack to silence errors on filtering unicode objects
|
||||
# until we fix the parsing
|
||||
if type(data_object) == unicode:
|
||||
node_type = "unicode"
|
||||
else:
|
||||
raise e
|
||||
|
||||
Renderer = self.renderers[node_type]
|
||||
|
||||
common_args = [
|
||||
self.project_info,
|
||||
context,
|
||||
child_renderer_factory,
|
||||
self.node_factory,
|
||||
self.state,
|
||||
self.document,
|
||||
self.target_handler,
|
||||
self.domain_directive_factory
|
||||
]
|
||||
|
||||
if node_type == "docmarkup":
|
||||
|
||||
creator = self.node_factory.inline
|
||||
if data_object.type_ == "emphasis":
|
||||
creator = self.node_factory.emphasis
|
||||
elif data_object.type_ == "computeroutput":
|
||||
creator = self.node_factory.literal
|
||||
elif data_object.type_ == "bold":
|
||||
creator = self.node_factory.strong
|
||||
elif data_object.type_ == "superscript":
|
||||
creator = self.node_factory.superscript
|
||||
elif data_object.type_ == "subscript":
|
||||
creator = self.node_factory.subscript
|
||||
elif data_object.type_ == "center":
|
||||
print("Warning: does not currently handle 'center' text display")
|
||||
elif data_object.type_ == "small":
|
||||
print("Warning: does not currently handle 'small' text display")
|
||||
|
||||
return Renderer(
|
||||
creator,
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "verbatim":
|
||||
|
||||
return Renderer(
|
||||
self.rst_content_creator,
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "compound":
|
||||
|
||||
kind = data_object.kind
|
||||
if kind in ["file", "dir", "page", "example", "group"]:
|
||||
return Renderer(indexrenderer.FileRenderer, *common_args)
|
||||
|
||||
class_ = indexrenderer.CompoundTypeSubRenderer
|
||||
|
||||
# For compound node types Renderer is CreateCompoundTypeSubRenderer
|
||||
# as defined below. This could be cleaner
|
||||
return Renderer(
|
||||
class_,
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "memberdef":
|
||||
|
||||
if data_object.kind in ("function", "slot") or (data_object.kind == 'friend' and data_object.argsstring):
|
||||
Renderer = compoundrenderer.FuncMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "enum":
|
||||
Renderer = compoundrenderer.EnumMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "typedef":
|
||||
Renderer = compoundrenderer.TypedefMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "variable":
|
||||
Renderer = compoundrenderer.VariableMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "define":
|
||||
Renderer = compoundrenderer.DefineMemberDefTypeSubRenderer
|
||||
|
||||
if node_type == "param":
|
||||
return Renderer(
|
||||
parent_data_object.node_type != "templateparamlist",
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "docsimplesect":
|
||||
if data_object.kind == "par":
|
||||
Renderer = compoundrenderer.ParDocSimpleSectTypeSubRenderer
|
||||
|
||||
return Renderer(
|
||||
*common_args
|
||||
)
|
||||
|
||||
class CreateCompoundTypeSubRenderer(object):
|
||||
|
||||
def __init__(self, parser_factory):
|
||||
|
||||
self.parser_factory = parser_factory
|
||||
|
||||
def __call__(self, class_, project_info, *args):
|
||||
|
||||
compound_parser = self.parser_factory.create_compound_parser(project_info)
|
||||
return class_(compound_parser, project_info, *args)
|
||||
|
||||
|
||||
class CreateRefTypeSubRenderer(object):
|
||||
|
||||
def __init__(self, parser_factory):
|
||||
|
||||
self.parser_factory = parser_factory
|
||||
|
||||
def __call__(self, project_info, *args):
|
||||
|
||||
compound_parser = self.parser_factory.create_compound_parser(project_info)
|
||||
return compoundrenderer.RefTypeSubRenderer(compound_parser, project_info, *args)
|
||||
|
||||
|
||||
class DoxygenToRstRendererFactoryCreator(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
node_factory,
|
||||
parser_factory,
|
||||
domain_directive_factory,
|
||||
rst_content_creator,
|
||||
project_info
|
||||
):
|
||||
|
||||
self.node_factory = node_factory
|
||||
self.parser_factory = parser_factory
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
self.rst_content_creator = rst_content_creator
|
||||
self.project_info = project_info
|
||||
|
||||
def create_factory(self, node_stack, state, document, filter_, target_handler):
|
||||
|
||||
data_object = node_stack[0]
|
||||
|
||||
renderers = {
|
||||
"doxygen" : indexrenderer.DoxygenTypeSubRenderer,
|
||||
"compound" : CreateCompoundTypeSubRenderer(self.parser_factory),
|
||||
"doxygendef" : compoundrenderer.DoxygenTypeSubRenderer,
|
||||
"compounddef" : compoundrenderer.CompoundDefTypeSubRenderer,
|
||||
"sectiondef" : compoundrenderer.SectionDefTypeSubRenderer,
|
||||
"memberdef" : compoundrenderer.MemberDefTypeSubRenderer,
|
||||
"enumvalue" : compoundrenderer.EnumvalueTypeSubRenderer,
|
||||
"linkedtext" : compoundrenderer.LinkedTextTypeSubRenderer,
|
||||
"description" : compoundrenderer.DescriptionTypeSubRenderer,
|
||||
"param" : compoundrenderer.ParamTypeSubRenderer,
|
||||
"docreftext" : compoundrenderer.DocRefTextTypeSubRenderer,
|
||||
"docheading" : compoundrenderer.DocHeadingTypeSubRenderer,
|
||||
"docpara" : compoundrenderer.DocParaTypeSubRenderer,
|
||||
"docmarkup" : compoundrenderer.DocMarkupTypeSubRenderer,
|
||||
"docparamlist" : compoundrenderer.DocParamListTypeSubRenderer,
|
||||
"docparamlistitem" : compoundrenderer.DocParamListItemSubRenderer,
|
||||
"docparamnamelist" : compoundrenderer.DocParamNameListSubRenderer,
|
||||
"docparamname" : compoundrenderer.DocParamNameSubRenderer,
|
||||
"docsect1" : compoundrenderer.DocSect1TypeSubRenderer,
|
||||
"docsimplesect" : compoundrenderer.DocSimpleSectTypeSubRenderer,
|
||||
"doctitle" : compoundrenderer.DocTitleTypeSubRenderer,
|
||||
"docformula" : compoundrenderer.DocForumlaTypeSubRenderer,
|
||||
"docimage" : compoundrenderer.DocImageTypeSubRenderer,
|
||||
"docurllink" : compoundrenderer.DocURLLinkSubRenderer,
|
||||
"listing" : compoundrenderer.ListingTypeSubRenderer,
|
||||
"codeline" : compoundrenderer.CodeLineTypeSubRenderer,
|
||||
"highlight" : compoundrenderer.HighlightTypeSubRenderer,
|
||||
"templateparamlist" : compoundrenderer.TemplateParamListRenderer,
|
||||
"inc" : compoundrenderer.IncTypeSubRenderer,
|
||||
"ref" : CreateRefTypeSubRenderer(self.parser_factory),
|
||||
"verbatim" : compoundrenderer.VerbatimTypeSubRenderer,
|
||||
"mixedcontainer" : compoundrenderer.MixedContainerRenderer,
|
||||
"unicode" : UnicodeRenderer,
|
||||
"doclist": compoundrenderer.DocListTypeSubRenderer,
|
||||
"doclistitem": compoundrenderer.DocListItemTypeSubRenderer,
|
||||
}
|
||||
|
||||
try:
|
||||
node_type = data_object.node_type
|
||||
except AttributeError as e:
|
||||
|
||||
# Horrible hack to silence errors on filtering unicode objects
|
||||
# until we fix the parsing
|
||||
if type(data_object) == unicode:
|
||||
node_type = "unicode"
|
||||
else:
|
||||
raise e
|
||||
|
||||
return DoxygenToRstRendererFactory(
|
||||
"root",
|
||||
renderers,
|
||||
self,
|
||||
self.node_factory,
|
||||
self.project_info,
|
||||
state,
|
||||
document,
|
||||
self.rst_content_creator,
|
||||
filter_,
|
||||
target_handler,
|
||||
self.domain_directive_factory
|
||||
)
|
||||
|
||||
def create_child_factory( self, project_info, data_object, parent_renderer_factory ):
|
||||
|
||||
try:
|
||||
node_type = data_object.node_type
|
||||
except AttributeError as e:
|
||||
|
||||
# Horrible hack to silence errors on filtering unicode objects
|
||||
# until we fix the parsing
|
||||
if type(data_object) == unicode:
|
||||
node_type = "unicode"
|
||||
else:
|
||||
raise e
|
||||
|
||||
return DoxygenToRstRendererFactory(
|
||||
node_type,
|
||||
parent_renderer_factory.renderers,
|
||||
self,
|
||||
self.node_factory,
|
||||
parent_renderer_factory.project_info,
|
||||
parent_renderer_factory.state,
|
||||
parent_renderer_factory.document,
|
||||
self.rst_content_creator,
|
||||
parent_renderer_factory.filter_,
|
||||
parent_renderer_factory.target_handler,
|
||||
parent_renderer_factory.domain_directive_factory
|
||||
)
|
||||
|
||||
|
||||
# FactoryFactoryFactory. Ridiculous but necessary.
|
||||
class DoxygenToRstRendererFactoryCreatorConstructor(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
node_factory,
|
||||
parser_factory,
|
||||
domain_directive_factory,
|
||||
rst_content_creator
|
||||
):
|
||||
|
||||
self.node_factory = node_factory
|
||||
self.parser_factory = parser_factory
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
self.rst_content_creator = rst_content_creator
|
||||
|
||||
def create_factory_creator(self, project_info, document, options, target_handler):
|
||||
|
||||
return DoxygenToRstRendererFactoryCreator(
|
||||
self.node_factory,
|
||||
self.parser_factory,
|
||||
self.domain_directive_factory,
|
||||
self.rst_content_creator,
|
||||
project_info,
|
||||
)
|
||||
|
||||
|
||||
def format_parser_error(name, error, filename, state, lineno, do_unicode_warning):
|
||||
|
||||
warning = '%s: Unable to parse xml file "%s". ' % (name, filename)
|
||||
explanation = 'Reported error: %s. ' % error
|
||||
|
||||
unicode_explanation_text = ""
|
||||
unicode_explanation = []
|
||||
if do_unicode_warning:
|
||||
unicode_explanation_text = textwrap.dedent("""
|
||||
Parsing errors are often due to unicode errors associated with the encoding of the original
|
||||
source files. Doxygen propagates invalid characters from the input source files to the
|
||||
output xml.""").strip().replace("\n", " ")
|
||||
unicode_explanation = [nodes.paragraph("", "", nodes.Text(unicode_explanation_text))]
|
||||
|
||||
return [nodes.warning("",
|
||||
nodes.paragraph("", "", nodes.Text(warning)),
|
||||
nodes.paragraph("", "", nodes.Text(explanation)),
|
||||
*unicode_explanation
|
||||
),
|
||||
state.document.reporter.warning(warning + explanation + unicode_explanation_text, line=lineno)
|
||||
]
|
Binary file not shown.
@ -1,127 +0,0 @@
|
||||
|
||||
class Renderer(object):
|
||||
|
||||
def __init__(self,
|
||||
project_info,
|
||||
context,
|
||||
renderer_factory,
|
||||
node_factory,
|
||||
state,
|
||||
document,
|
||||
target_handler,
|
||||
domain_directive_factory,
|
||||
):
|
||||
|
||||
self.project_info = project_info
|
||||
self.context = context
|
||||
self.data_object = context.node_stack[0]
|
||||
self.renderer_factory = renderer_factory
|
||||
self.node_factory = node_factory
|
||||
self.state = state
|
||||
self.document = document
|
||||
self.target_handler = target_handler
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
|
||||
if self.context.domain == '':
|
||||
self.context.domain = self.get_domain()
|
||||
|
||||
def get_domain(self):
|
||||
"""Returns the domain for the current node."""
|
||||
|
||||
def get_filename(node):
|
||||
"""Returns the name of a file where the declaration represented by node is located."""
|
||||
try:
|
||||
return node.location.file
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
node_stack = self.context.node_stack
|
||||
node = node_stack[0]
|
||||
# An enumvalue node doesn't have location, so use its parent node for detecting the domain instead.
|
||||
if type(node) == unicode or node.node_type == "enumvalue":
|
||||
node = node_stack[1]
|
||||
filename = get_filename(node)
|
||||
if not filename and node.node_type == "compound":
|
||||
file_data = self.compound_parser.parse(node.refid)
|
||||
filename = get_filename(file_data.compounddef)
|
||||
return self.project_info.domain_for_file(filename) if filename else ''
|
||||
|
||||
def get_fully_qualified_name(self):
|
||||
|
||||
names = []
|
||||
node_stack = self.context.node_stack
|
||||
node = node_stack[0]
|
||||
if node.node_type == 'enumvalue':
|
||||
names.append(node.name)
|
||||
# Skip the name of the containing enum because it is not a part of the fully qualified name.
|
||||
node_stack = node_stack[2:]
|
||||
|
||||
# If the node is a namespace, use its name because namespaces are skipped in the main loop.
|
||||
if node.node_type == 'compound' and node.kind == 'namespace':
|
||||
names.append(node.name)
|
||||
|
||||
for node in node_stack:
|
||||
if node.node_type == 'ref' and len(names) == 0:
|
||||
return node.valueOf_
|
||||
if (node.node_type == 'compound' and node.kind not in ['file', 'namespace']) or \
|
||||
node.node_type == 'memberdef':
|
||||
# We skip the 'file' entries because the file name doesn't form part of the
|
||||
# qualified name for the identifier. We skip the 'namespace' entries because if we
|
||||
# find an object through the namespace 'compound' entry in the index.xml then we'll
|
||||
# also have the 'compounddef' entry in our node stack and we'll get it from that. We
|
||||
# need the 'compounddef' entry because if we find the object through the 'file'
|
||||
# entry in the index.xml file then we need to get the namespace name from somewhere
|
||||
names.insert(0, node.name)
|
||||
if (node.node_type == 'compounddef' and node.kind == 'namespace'):
|
||||
# Nested namespaces include their parent namespace(s) in compoundname. ie,
|
||||
# compoundname is 'foo::bar' instead of just 'bar' for namespace 'bar' nested in
|
||||
# namespace 'foo'. We need full compoundname because node_stack doesn't necessarily
|
||||
# include parent namespaces and we stop here in case it does.
|
||||
names.insert(0, node.compoundname)
|
||||
break
|
||||
|
||||
return '::'.join(names)
|
||||
|
||||
def create_template_node(self, decl):
|
||||
"""Creates a node for the ``template <...>`` part of the declaration."""
|
||||
if not decl.templateparamlist:
|
||||
return None
|
||||
context = self.context.create_child_context(decl.templateparamlist)
|
||||
renderer = self.renderer_factory.create_renderer(context)
|
||||
nodes = [self.node_factory.Text("template <")]
|
||||
nodes.extend(renderer.render())
|
||||
nodes.append(self.node_factory.Text(">"))
|
||||
signode = self.node_factory.desc_signature()
|
||||
signode.extend(nodes)
|
||||
return signode
|
||||
|
||||
def run_domain_directive(self, kind, names):
|
||||
domain_directive = self.renderer_factory.domain_directive_factory.create(
|
||||
self.context.domain, [kind, names] + self.context.directive_args[2:])
|
||||
|
||||
# Translate Breathe's no-link option into the standard noindex option.
|
||||
if 'no-link' in self.context.directive_args[2]:
|
||||
domain_directive.options['noindex'] = True
|
||||
nodes = domain_directive.run()
|
||||
|
||||
# Filter out outer class names if we are rendering a member as a part of a class content.
|
||||
signode = nodes[1].children[0]
|
||||
if len(names) > 0 and self.context.child:
|
||||
signode.children = [n for n in signode.children if not n.tagname == 'desc_addname']
|
||||
return nodes
|
||||
|
||||
|
||||
class RenderContext(object):
|
||||
|
||||
def __init__(self, node_stack, mask_factory, directive_args, domain='', child=False):
|
||||
self.node_stack = node_stack
|
||||
self.mask_factory = mask_factory
|
||||
self.directive_args = directive_args
|
||||
self.domain = domain
|
||||
self.child = child
|
||||
|
||||
def create_child_context(self, data_object):
|
||||
|
||||
node_stack = self.node_stack[:]
|
||||
node_stack.insert(0, self.mask_factory.mask(data_object))
|
||||
return RenderContext(node_stack, self.mask_factory, self.directive_args, self.domain, True)
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -1,115 +0,0 @@
|
||||
|
||||
from .base import Renderer
|
||||
|
||||
class DoxygenTypeSubRenderer(Renderer):
|
||||
|
||||
def render(self):
|
||||
|
||||
nodelist = []
|
||||
|
||||
# Process all the compound children
|
||||
for compound in self.data_object.get_compound():
|
||||
context = self.context.create_child_context(compound)
|
||||
compound_renderer = self.renderer_factory.create_renderer(context)
|
||||
nodelist.extend(compound_renderer.render())
|
||||
|
||||
return nodelist
|
||||
|
||||
|
||||
class CompoundRenderer(Renderer):
    """Base class for CompoundTypeSubRenderer and RefTypeSubRenderer."""

    def __init__(self, compound_parser, render_empty_node, *args):
        # Parser used to load the compound's own xml file from its refid.
        self.compound_parser = compound_parser
        # When False, compounds whose rendered content is empty are skipped.
        self.render_empty_node = render_empty_node
        Renderer.__init__(self, *args)

    def create_doxygen_target(self):
        """Can be overridden to create a target node which uses the doxygen refid information
        which can be used for creating links between internal doxygen elements.

        The default implementation should suffice most of the time.
        """

        # Prefix the refid with the project name so targets stay unique
        # across multiple documented projects.
        refid = "%s%s" % (self.project_info.name(), self.data_object.refid)
        return self.target_handler.create_target(refid)

    def render_signature(self, file_data, doxygen_target):
        """Render the compound's signature via the Sphinx domain directive.

        Returns (nodes, contentnode) where contentnode is the still-empty
        container that the caller fills with the compound's rendered content.
        """
        # Defer to domains specific directive.
        name, kind = self.get_node_info(file_data)
        self.context.directive_args[1] = [self.get_fully_qualified_name()]
        nodes = self.run_domain_directive(kind, self.context.directive_args[1])
        # nodes[1] is the desc node built by the directive; its two children
        # are the signature node and the content node.
        node = nodes[1]
        signode, contentnode = node.children

        # The cpp domain in Sphinx doesn't support structs at the moment, so change the text from "class "
        # to the correct kind which can be "class " or "struct ".
        signode[0] = self.node_factory.desc_annotation(kind + ' ', kind + ' ')

        # Check if there is template information and format it as desired
        template_signode = self.create_template_node(file_data.compounddef)
        if template_signode:
            node.insert(0, template_signode)
        # Put the doxygen link target in front of the first child so links to
        # this compound resolve to the start of its signature.
        node.children[0].insert(0, doxygen_target)
        return nodes, contentnode

    def render(self):
        """Render this compound: parse its xml file, then emit signature + content."""

        # Read in the corresponding xml file and process
        file_data = self.compound_parser.parse(self.data_object.refid)

        parent_context = self.context.create_child_context(file_data)
        data_renderer = self.renderer_factory.create_renderer(parent_context)
        rendered_data = data_renderer.render()

        # No content and empty nodes not wanted: emit nothing at all.
        if not rendered_data and not self.render_empty_node:
            return []

        # Re-read file_data through the context so any mask applied by
        # create_child_context is in effect.
        file_data = parent_context.node_stack[0]
        new_context = parent_context.create_child_context(file_data.compounddef)

        nodes, contentnode = self.render_signature(file_data, self.create_doxygen_target())

        # Render the compound's include entries ahead of its members.
        if file_data.compounddef.includes:
            for include in file_data.compounddef.includes:
                context = new_context.create_child_context(include)
                renderer = self.renderer_factory.create_renderer(context)
                contentnode.extend(renderer.render())

        contentnode.extend(rendered_data)
        return nodes
|
||||
|
||||
|
||||
class CompoundTypeSubRenderer(CompoundRenderer):
    """Compound renderer that always emits a node, even when its content is empty."""

    def __init__(self, compound_parser, *args):
        # render_empty_node=True: the compound is shown even without members.
        CompoundRenderer.__init__(self, compound_parser, True, *args)

    def get_node_info(self, file_data):
        """Return (name, kind) taken from the referencing data object."""
        return self.data_object.name, self.data_object.kind
|
||||
|
||||
|
||||
class FileRenderer(CompoundTypeSubRenderer):
    """Renders a doxygen 'file' compound with a hand-built signature."""

    def render_signature(self, file_data, doxygen_target):
        """Build the title line for a file compound directly.

        Unlike the base implementation this does not defer to a Sphinx domain
        directive; it assembles the desc/desc_signature nodes itself.
        Returns ([node], contentnode) like the base implementation.
        """
        # Build targets for linking
        targets = []
        targets.extend(doxygen_target)

        title_signode = self.node_factory.desc_signature()
        title_signode.extend(targets)

        # Set up the title
        name, kind = self.get_node_info(file_data)
        title_signode.append(self.node_factory.emphasis(text=kind))
        title_signode.append(self.node_factory.Text(" "))
        title_signode.append(self.node_factory.desc_name(text=name))

        # Content container filled in later by CompoundRenderer.render.
        contentnode = self.node_factory.desc_content()

        node = self.node_factory.desc()
        node.document = self.state.document
        node['objtype'] = kind
        node.append(title_signode)
        node.append(contentnode)
        return [node], contentnode
|
Binary file not shown.
@ -1,62 +0,0 @@
|
||||
"""
|
||||
Masks
|
||||
=====
|
||||
|
||||
Masks are related to filters. Filters can block the processing of particular parts of the xml
|
||||
hierarchy but they can only work on node level. If the part of the xml hierarchy that you want to
|
||||
filter out is read in as an instance of one of the classes in parser/doxygen/*.py then you can use
|
||||
the filters. However, if you want to filter out an attribute from one of the nodes (and some of the
|
||||
xml child nodes are read in as attributes on their parents) then you can't use a filter.
|
||||
|
||||
We introduce the Mask's to fulfil this need. The masks are designed to be applied to a particular
|
||||
node type and to limit the access to particular attributes on the node. For example, the
|
||||
NoParameterNamesMask wraps a node and returns all its standard attributes but returns None for the
|
||||
'declname' and 'defname' attributes.
|
||||
|
||||
Currently the Mask functionality is only used for the text signature rendering for doing function
|
||||
matching.
|
||||
|
||||
"""
|
||||
|
||||
class NoParameterNamesMask(object):
    """Wraps a node and hides its parameter-name attributes.

    Reads of 'declname', 'defname' and 'defval' yield None; every other
    attribute access is forwarded to the wrapped data object.
    """

    def __init__(self, data_object):
        self.data_object = data_object

    def __getattr__(self, attr):
        # __getattr__ only fires for names not found on the mask itself, so
        # 'data_object' is always resolved normally.
        if attr in ('declname', 'defname', 'defval'):
            return None
        return getattr(self.data_object, attr)
|
||||
|
||||
class MaskFactory(object):
    """Wraps data objects in masks registered for their node type.

    The lookup dict maps a ``node_type`` string to a mask class (a callable
    taking the data object); objects whose type has no registered mask are
    returned unchanged.
    """

    def __init__(self, lookup):
        # Map of node_type name -> mask class.
        self.lookup = lookup

    def mask(self, data_object):
        """Return data_object wrapped in its registered mask, or unchanged."""
        try:
            node_type = data_object.node_type
        except AttributeError:
            # Horrible hack to silence errors on filtering plain string
            # objects until we fix the parsing.
            # Fixed for Python 3: the builtin ``unicode`` no longer exists, so
            # check against ``str``; the historical "unicode" lookup key is
            # kept so existing lookup tables keep working.
            if isinstance(data_object, str):
                node_type = "unicode"
            else:
                # Bare raise preserves the original traceback ("raise e" did not).
                raise

        if node_type in self.lookup:
            mask_class = self.lookup[node_type]
            return mask_class(data_object)

        return data_object
|
||||
|
||||
|
||||
class NullMaskFactory(object):
    """No-op mask factory: hands every data object back untouched."""

    def mask(self, data_object):
        """Return data_object unchanged."""
        return data_object
|
||||
|
Binary file not shown.
@ -1,40 +0,0 @@
|
||||
|
||||
class TargetHandler(object):
    """Builds docutils target nodes and registers them with the document."""

    def __init__(self, project_info, node_factory, document):
        self.project_info = project_info
        self.node_factory = node_factory
        self.document = document

    def create_target(self, id_):
        """Creates a target node and registers it with the document and returns it in a list"""
        node = self.node_factory.target(ids=[id_], names=[id_])
        try:
            self.document.note_explicit_target(node)
        except Exception:
            # Registration failed (most likely the id is already taken).
            # TODO: We should really return a docutils warning node here
            print("Warning: Duplicate target detected: %s" % id_)
        return [node]
|
||||
|
||||
class NullTargetHandler(object):
    """Target handler that deliberately produces no link targets."""

    def create_target(self, refid):
        """Return an empty list: no target node is created for refid."""
        return []
|
||||
|
||||
class TargetHandlerFactory(object):
    """Chooses between a real and a null target handler from directive options."""

    def __init__(self, node_factory):
        # Factory forwarded to every TargetHandler this factory creates.
        self.node_factory = node_factory

    def create_target_handler(self, options, project_info, document):
        """Return a NullTargetHandler when the "no-link" option is set,
        otherwise a TargetHandler wired to the given document."""

        # dict.has_key() was removed in Python 3; membership test via `in`
        # is the equivalent (and also works on Python 2).
        if "no-link" in options:
            return NullTargetHandler()

        return TargetHandler(project_info, self.node_factory, document)
|
||||
|
Binary file not shown.
@ -36,7 +36,7 @@ extensions = [
|
||||
'sphinx.ext.pngmath',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'breathe',
|
||||
# 'breathe',
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
@ -51,7 +51,7 @@ source_suffix = '.rst'
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
master_doc = 'source/index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'OBITools3'
|
||||
@ -292,7 +292,7 @@ texinfo_documents = [
|
||||
#texinfo_no_detailmenu = False
|
||||
|
||||
#Breathe configuration
|
||||
sys.path.append( "../breathe/" )
|
||||
breathe_projects = { "OBITools3": "../doxygen/xml/" }
|
||||
sys.path.append( "breathe/" )
|
||||
breathe_projects = { "OBITools3": "doxygen/xml/" }
|
||||
breathe_default_project = "OBITools3"
|
||||
|
4
doc/doxygen/xml/.gitignore
vendored
4
doc/doxygen/xml/.gitignore
vendored
@ -1,4 +0,0 @@
|
||||
# Ignore everything in this directory
|
||||
*
|
||||
# Except this file
|
||||
!.gitignore
|
@ -2,19 +2,19 @@
|
||||
The OBItools3 Data Management System (OBIDMS)
|
||||
*********************************************
|
||||
|
||||
A complete DNA Metabarcoding experiment rely on several kinds of data.
|
||||
A complete DNA metabarcoding experiment relies on several kinds of data.
|
||||
|
||||
- The sequence data resulting of the PCR products sequencing,
|
||||
- The sequence data resulting from the sequencing of the PCR products,
|
||||
- The description of the samples including all their metadata,
|
||||
- One or several refence database used for the taxonomical annotation
|
||||
- One or several taxonomies.
|
||||
- One or several reference databases used for the taxonomic annotation,
|
||||
- One or several taxonomy databases.
|
||||
|
||||
Up to now each of these categories of data were stored in separate
|
||||
files an nothing obliged to keep them together.
|
||||
Up to now, each of these categories of data were stored in separate
|
||||
files, and nothing made it mandatory to keep them together.
|
||||
|
||||
|
||||
The `Data Management System` (DMS) of OBITools3 can be considered
|
||||
as a basic database system.
|
||||
The `Data Management System` (DMS) of OBITools3 can be viewed like a basic
|
||||
database system.
|
||||
|
||||
|
||||
OBIDMS UML
|
||||
@ -25,11 +25,21 @@ OBIDMS UML
|
||||
:download:`html version of the OBIDMS UML file <UML/ObiDMS_UML.class.violet.html>`
|
||||
|
||||
|
||||
An OBIDMS directory consists of :
|
||||
* OBIDMS column files
|
||||
* OBIDMS release files
|
||||
* OBIDMS dictionary files
|
||||
* one OBIDMS history file
|
||||
An OBIDMS directory contains :
|
||||
* one `OBIDMS history file <#obidms-history-files>`_
|
||||
* OBIDMS column directories
|
||||
|
||||
|
||||
OBIDMS column directories
|
||||
=========================
|
||||
|
||||
OBIDMS column directories contain :
|
||||
* all the different versions of one OBIDMS column, under the form of different files (`OBIDMS column files <#obidms-column-files>`_)
|
||||
* one `OBIDMS version file <#obidms-version-files>`_
|
||||
|
||||
The directory name is the column attribute with the extension ``.obicol``.
|
||||
|
||||
Example: ``count.obicol``
|
||||
|
||||
|
||||
OBIDMS column files
|
||||
@ -38,7 +48,7 @@ OBIDMS column files
|
||||
Each OBIDMS column file contains :
|
||||
* a header of a size equal to a multiple of PAGESIZE (PAGESIZE being equal to 4096 bytes
|
||||
on most systems) containing metadata
|
||||
* one column of data with the same OBIType
|
||||
* Lines of data with the same `OBIType <types.html#obitypes>`_
|
||||
|
||||
|
||||
Header
|
||||
@ -48,27 +58,33 @@ The header of an OBIDMS column contains :
|
||||
|
||||
* Endian byte order
|
||||
* Header size (PAGESIZE multiple)
|
||||
*
|
||||
* File status : Open/Closed
|
||||
* Owner : PID of the process that created the file and is the only one allowed to modify it if it is open
|
||||
* Number of lines (total or without the header?)
|
||||
* OBIType
|
||||
* Date of creation
|
||||
* Version of the file
|
||||
* Number of lines of data
|
||||
* Number of lines of data used
|
||||
* `OBIType <types.html#obitypes>`_ (type of the data)
|
||||
* Date of creation of the file
|
||||
* Version of the OBIDMS column
|
||||
* The column name
|
||||
* Eventual comments
|
||||
|
||||
|
||||
Data
|
||||
----
|
||||
|
||||
A column of data with the same OBIType.
|
||||
A line of data corresponds to a vector of elements. Each element is associated with an element name.
|
||||
Element names are stored in the header. The correspondence between an element and its name is made
|
||||
using their order in the lists of elements and elements names. This structure allows the storage of
|
||||
dictionary-like data.
|
||||
|
||||
Example: In the header, the attribute ``elements_names`` will be associated with the value ``"sample_1;
|
||||
sample_2;sample_3"``, and a line of data with the type ``OBInt_t`` will be stored as an ``OBInt_t`` vector
|
||||
of size three e.g. ``5|8|4``.
|
||||
|
||||
|
||||
Mandatory columns
|
||||
-----------------
|
||||
|
||||
Some columns must exist in an OBIDMS directory :
|
||||
* sequence identifiers column (type *OBIStr_t*)
|
||||
* sequence identifiers column (type ``OBIStr_t``)
|
||||
|
||||
|
||||
File name
|
||||
@ -83,8 +99,7 @@ Example : ``count@3.odc``
|
||||
Modifications
|
||||
-------------
|
||||
|
||||
An OBIDMS column file can only be modified by the process that created it, if its status is set to Open. Those informations are
|
||||
contained in the `header <#header>`_.
|
||||
An OBIDMS column file can only be modified by the process that created it, and while its status is set to Open.
|
||||
|
||||
When a process wants to modify an OBIDMS column file that is closed, it must first clone it. Cloning creates a new version of the
|
||||
file that belongs to the process, i.e., only that process can modify that file, as long as its status is set to Open. Once the process
|
||||
@ -94,6 +109,8 @@ again.
|
||||
That means that one column is stored in one file (if there is only one version)
|
||||
or more (if there are several versions), and that there is one file per version.
|
||||
|
||||
All the versions of one column are stored in one directory.
|
||||
|
||||
|
||||
Versioning
|
||||
----------
|
||||
@ -101,22 +118,22 @@ Versioning
|
||||
The first version of a column file is numbered 0, and each new version increments that
|
||||
number by 1.
|
||||
|
||||
The number of the latest version of an OBIDMS column is stored in an `OBIDMS release file <formats.html#obidms-release-files>`_.
|
||||
The number of the latest version of an OBIDMS column is stored in the `OBIDMS version file <#obidms-version-files>`_ of its directory.
|
||||
|
||||
|
||||
OBIDMS release files
|
||||
OBIDMS version files
|
||||
====================
|
||||
|
||||
Each OBIDMS column is associated with an OBIDMS release file that contains the number of the latest
|
||||
Each OBIDMS column is associated with an OBIDMS version file in its directory, that contains the number of the latest
|
||||
version of the column.
|
||||
|
||||
File name
|
||||
---------
|
||||
|
||||
OBIDMS release files are named with the attribute associated to the data contained in the column, and
|
||||
have the extension ``.odr``.
|
||||
OBIDMS version files are named with the attribute associated to the data contained in the column, and
|
||||
have the extension ``.odv``.
|
||||
|
||||
Example : ``count.odr``
|
||||
Example : ``count.odv``
|
||||
|
||||
|
||||
OBIDMS views
|
||||
@ -140,19 +157,4 @@ operations ever done in the OBIDMS directory and the views in between them :
|
||||
:width: 150 px
|
||||
:align: center
|
||||
|
||||
OBIType header file
|
||||
========================
|
||||
|
||||
.. doxygenfile:: obitypes.h
|
||||
|
||||
|
||||
OBIIntColumn header file
|
||||
========================
|
||||
|
||||
.. doxygenfile:: obiintcolumn.h
|
||||
|
||||
|
||||
OBIColumn header file
|
||||
=====================
|
||||
|
||||
.. doxygenfile:: obicolumn.h
|
||||
|
Binary file not shown.
Before Width: | Height: | Size: 63 KiB After Width: | Height: | Size: 67 KiB |
File diff suppressed because it is too large
Load Diff
@ -2,28 +2,28 @@
|
||||
Container types
|
||||
===============
|
||||
|
||||
Containers allow to manage collection of values of homogeneous type.
|
||||
Containers allow to manage collections of values of homogeneous type.
|
||||
Three container types exist.
|
||||
|
||||
A container is a non-mutable structure once it has been locked.
|
||||
Consequently just insert procedure are needed
|
||||
Consequently, only insertion procedures are needed.
|
||||
|
||||
Lists
|
||||
-----
|
||||
|
||||
Correspond to an ordered collection of values belonging an elementary type.
|
||||
Correspond to an ordered collection of values belonging to an elementary type.
|
||||
|
||||
At its creation
|
||||
At its creation, ...
|
||||
|
||||
|
||||
Sets
|
||||
----
|
||||
|
||||
Correspond to an unordered collection of values belonging an elementary type.
|
||||
Correspond to an unordered collection of values belonging to an elementary type.
|
||||
|
||||
|
||||
Dictionaries
|
||||
------------
|
||||
|
||||
Dictionaries allow to associate a `key` to a `value`. Values can be retrieved through its associated key.
|
||||
Values must belong an elementary type and keys must be *OBIStr_t*.
|
||||
Dictionaries allow to associate a `key` to a `value`. Values can be retrieved through their associated key.
|
||||
Values must belong to an elementary type and keys must be *OBIStr_t*.
|
||||
|
@ -2,8 +2,8 @@
|
||||
Data in OBITools3
|
||||
#################
|
||||
|
||||
The OBITools3 inaugure a new way to manage DNA metabarcoding data.
|
||||
They rely on a `Data management System` (DMS) that can be considered as
|
||||
The OBITools3 introduce a new way to manage DNA metabarcoding data.
|
||||
They rely on a `Data management System` (DMS) that can be viewed like
|
||||
a simplified database system.
|
||||
|
||||
|
||||
|
@ -12,7 +12,7 @@ Atomic types
|
||||
========= ========= ============ ==============================
|
||||
integer int32_t OBIInt_t a signed integer value
|
||||
float double OBIFloat_t a floating value
|
||||
boolean ? OBIBool_t a boolean true/false value
|
||||
boolean bool OBIBool_t a boolean true/false value
|
||||
char char OBIChar_t a character
|
||||
index size_t OBIIdx_t an index in a data structure
|
||||
========= ========= ============ ==============================
|
||||
|
@ -63,8 +63,14 @@ Issue tracking
|
||||
==============
|
||||
|
||||
Issue tracking is done using `GitLab <https://about.gitlab.com/>`_ at http://git.metabarcoding.org/.
|
||||
Creating a branch should always lead to the creation of a label that refers to it in GitLab.
|
||||
Tickets should always be labelled with the branches for which they are relevant.
|
||||
Tickets should always be labeled with the branches for which they are relevant.
|
||||
|
||||
|
||||
*************
|
||||
Documentation
|
||||
*************
|
||||
|
||||
C functions are documented in the header files for public functions, and in the source file for private functions.
|
||||
|
||||
|
||||
**************
|
||||
@ -86,7 +92,7 @@ C99 :
|
||||
* Object layer
|
||||
* OBITools3 library
|
||||
|
||||
`Python 3 <https://www.python.org/>`_ :
|
||||
`Python 3.5 <https://www.python.org/>`_ :
|
||||
* Top layer code (scripts)
|
||||
|
||||
For the documentation, `Sphinx <http://sphinx-doc.org/>`_ should be used for both the original
|
||||
@ -99,8 +105,22 @@ in the Sphinx documentation using `Breathe <https://breathe.readthedocs.org/en/l
|
||||
Naming conventions
|
||||
******************
|
||||
|
||||
.. todo::
|
||||
Look for common naming conventions
|
||||
Struct, Enum: ``Title_case``
|
||||
|
||||
Enum members, macros, constants: ``ALL_CAPS``
|
||||
|
||||
Functions, local variables: ``lower_case``
|
||||
|
||||
Public functions: ``obi_lower_case``
|
||||
|
||||
Functions that shouldn't be called directly: ``_lower_case`` (``_`` prefix)
|
||||
|
||||
Global variables: ``g_lower_case`` (``g_`` prefix)
|
||||
|
||||
Pointers: ``pointer_ptr`` (``_ptr`` suffix)
|
||||
|
||||
.. note::
|
||||
Underscores are used to delimit 'words'.
|
||||
|
||||
|
||||
*****************
|
||||
|
@ -11,7 +11,6 @@ OBITools3 documentation
|
||||
|
||||
Programming guidelines <guidelines>
|
||||
Data structures <data>
|
||||
Pistes de reflexion <pistes>
|
||||
|
||||
|
||||
Indices and tables
|
||||
|
@ -1,21 +0,0 @@
|
||||
###################
|
||||
Pistes de reflexion
|
||||
###################
|
||||
|
||||
|
||||
******************************
|
||||
Ce que l'on veut pouvoir faire
|
||||
******************************
|
||||
|
||||
* Gerer les valeurs manquantes
|
||||
* Modifier une colonne en cours d'ecriture (mmap)
|
||||
* Ajouter des valeurs a la fin du fichier d'une colonne en cours d'ecriture (mmap)
|
||||
*
|
||||
|
||||
|
||||
******
|
||||
Divers
|
||||
******
|
||||
|
||||
* Si l'ordre d'une colonne est change, elle est reecrite (pas d'index).
|
||||
* Utilisation de semaphores pour la lecture
|
55
doc/source/specialvalues.rst
Normal file
55
doc/source/specialvalues.rst
Normal file
@ -0,0 +1,55 @@
|
||||
==============
|
||||
Special values
|
||||
==============
|
||||
|
||||
|
||||
NA values
|
||||
=========
|
||||
|
||||
All OBITypes have an associated NA (Not Available) value.
|
||||
NA values are implemented by specifying an explicit NA value for each type,
|
||||
corresponding to the R standards as much as possible:
|
||||
|
||||
* For the type ``OBIInt_t``, the NA value is ``INT_MIN``.
|
||||
|
||||
* For the type ``OBIBool_t``, the NA value is ``2``.
|
||||
|
||||
* For the types ``OBIIdx_t`` and ``OBITaxid_t``, the NA value is ``SIZE_MAX``.
|
||||
|
||||
* For the type ``OBIChar_t``: the NA value is ``\0``.
|
||||
|
||||
* For the type ``OBIFloat_t``::
|
||||
|
||||
typedef union
|
||||
{
|
||||
double value;
|
||||
unsigned int word[2];
|
||||
} ieee_double;
|
||||
|
||||
static double NA_value(void)
|
||||
{
|
||||
volatile ieee_double x;
|
||||
x.word[hw] = 0x7ff00000;
|
||||
x.word[lw] = 1954;
|
||||
return x.value;
|
||||
}
|
||||
|
||||
|
||||
Minimum and maximum values for ``OBIInt_t``
|
||||
===========================================
|
||||
|
||||
* Maximum value : ``INT_MAX``
|
||||
* Minimum value : ``INT_MIN(-1?)``
|
||||
|
||||
|
||||
Infinity values for the type ``OBIFloat_t``
|
||||
===========================================
|
||||
|
||||
* Positive infinity : ``INFINITY`` (should be defined in ``<math.h>``)
|
||||
* Negative infinity : ``-INFINITY``
|
||||
|
||||
|
||||
NaN value for the type ``OBIFloat_t``
|
||||
=====================================
|
||||
|
||||
* NaN (Not a Number) value : ``NAN`` (should be defined in ``<math.h>`` but probably needs to be tested)
|
@ -6,14 +6,10 @@ OBITypes
|
||||
.. image:: ./UML/OBITypes_UML.png
|
||||
:download:`html version of the OBITypes UML file <UML/OBITypes_UML.class.violet.html>`
|
||||
|
||||
.. note::
|
||||
All OBITypes have an associated NA (Not Available) value.
|
||||
We have currently two ideas for implementing NA values:
|
||||
|
||||
- By specifying an explicit NA value for each type
|
||||
- By adding to each column of an OBIDMS a bit vector
|
||||
indicating if the value is defined or not.
|
||||
|
||||
.. image:: ./UML/Obicolumn_classes_UML.png
|
||||
|
||||
:download:`html version of the OBIDMS classes UML file <UML/Obicolumn_classes_UML.class.violet.html>`
|
||||
|
||||
|
||||
.. toctree::
|
||||
@ -21,5 +17,4 @@ OBITypes
|
||||
|
||||
The elementary types <elementary>
|
||||
The containers <containers>
|
||||
|
||||
|
||||
Special values <specialvalues>
|
||||
|
1
doc/sphinx/build_dir.txt
Normal file
1
doc/sphinx/build_dir.txt
Normal file
@ -0,0 +1 @@
|
||||
build/lib.macosx-10.6-intel-3.5
|
50
python/obi.py
Normal file
50
python/obi.py
Normal file
@ -0,0 +1,50 @@
|
||||
#!/usr/local/bin/python3.4
'''
obi -- command line entry point of the OBITools3.

Builds the run configuration via getConfiguration (command line options
layered over the defaults below) and dispatches to the ``run`` function of
the sub-command module selected by the argument parser.
'''


# Fallback values used when an option is given neither on the command line
# nor by another configuration source.
default_config = { 'software' : "The OBITools",
                   'log' : False,
                   'loglevel' : 'INFO',
                   'progress' : True,
                   'defaultdms' : None
                 }

# Section name under which this program's options are stored in the config.
root_config_name='obi'

from obitools3.apps.config import getConfiguration # @UnresolvedImport
from obitools3.version import version

__all__ = []
__version__ = version
__date__ = '2014-09-28'
__updated__ = '2014-09-28'

DEBUG = 1
TESTRUN = 0
PROFILE = 0


if __name__ =="__main__":

    # Assemble the configuration, then execute the sub-command module that
    # the parser stored under the 'module' key.
    config = getConfiguration(root_config_name,
                              default_config)

    config[root_config_name]['module'].run(config)
|
||||
|
||||
|
||||
|
3
python/obitools3/apps/arguments.pxd
Normal file
3
python/obitools3/apps/arguments.pxd
Normal file
@ -0,0 +1,3 @@
|
||||
#cython: language_level=3
|
||||
|
||||
cpdef buildArgumentParser(str configname, str softname)
|
61
python/obitools3/apps/arguments.pyx
Normal file
61
python/obitools3/apps/arguments.pyx
Normal file
@ -0,0 +1,61 @@
|
||||
#cython: language_level=3
|
||||
|
||||
'''
|
||||
Created on 27 mars 2016
|
||||
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
|
||||
from .command import getCommandsList
|
||||
|
||||
class ObiParser(argparse.ArgumentParser):
    """ArgumentParser variant that prints the full help text on error."""

    def error(self, message):
        """Report message on stderr, show the help, then exit with status 2."""
        sys.stderr.write('error: %s\n' % message)
        self.print_help()
        sys.exit(2)
|
||||
|
||||
cpdef buildArgumentParser(str configname,
                          str softname):
    """Build the top-level argument parser for the obi command line.

    configname -- prefix used for the ``dest`` of every option; parsed values
                  end up under '<configname>:<option>' keys.
    softname   -- program name interpolated into help texts.

    One subparser is registered per command module returned by
    getCommandsList(); the chosen module itself is stored under the
    '<configname>:module' key so the caller can dispatch to it.
    """
    parser = ObiParser()

    parser.add_argument('--version', dest='%s:version' % configname,
                        action='store_true',
                        default=False,
                        help='Print the version of %s' % softname)

    parser.add_argument('--log', dest='%s:log' % configname,
                        action='store',
                        type=str,
                        default=None,
                        help='Create a logfile')

    parser.add_argument('--no-progress', dest='%s:progress' % configname,
                        action='store_false',
                        default=None,
                        help='Do not print the progress bar during analyzes')

    subparsers = parser.add_subparsers(title='subcommands',
                                       description='valid subcommands',
                                       help='additional help')

    commands = getCommandsList()

    for c in commands:
        module = commands[c]

        # Only modules exposing a run() callable count as real sub-commands.
        if hasattr(module, "run"):
            if hasattr(module, "__title__"):
                sub = subparsers.add_parser(c,help=module.__title__)
            else:
                sub = subparsers.add_parser(c)

            # Let the command module register its own options, if any.
            if hasattr(module, "addOptions"):
                module.addOptions(sub)

            # Remember which module must be executed for this sub-command.
            sub.set_defaults(**{'%s:module' % configname : module})

    return parser
|
3
python/obitools3/apps/command.pxd
Normal file
3
python/obitools3/apps/command.pxd
Normal file
@ -0,0 +1,3 @@
|
||||
#cython: language_level=3
|
||||
|
||||
cdef object loadCommand(str name,loader)
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user