Compare commits
92 Commits
eric-proto
...
cython_bug
Author | SHA1 | Date | |
---|---|---|---|
9b066f4327 | |||
456551ffeb | |||
ecb9d97adb | |||
0eaa5aa784 | |||
21923e213d | |||
6877fc4892 | |||
dbed3d9d1d | |||
fc8bf16769 | |||
e114a3c9cb | |||
ebc9f6f512 | |||
2b3f03ec28 | |||
8fd9c06be2 | |||
b553eef781 | |||
ee4c513fd4 | |||
c013e6ad33 | |||
c98d567e2f | |||
392f110c8d | |||
6ced3c4896 | |||
4b8bf41a71 | |||
c59a244e9d | |||
4b7f2d268b | |||
45af8396b8 | |||
eeebbc77c3 | |||
72155c3b73 | |||
9aaf2d264a | |||
0951934f77 | |||
68b6ab3224 | |||
b0570ee486 | |||
72105b2aed | |||
9ab8a42340 | |||
7b606c0477 | |||
7d7dbb1bf9 | |||
34a3717fb9 | |||
2698022aaf | |||
90bf15186e | |||
5c3bc03bd2 | |||
0e50fbf706 | |||
f2231770f1 | |||
f5937ec019 | |||
52e3f2ce4a | |||
f0dffaff13 | |||
f31d8983bb | |||
a6395ebaf2 | |||
3b7536c0df | |||
34ade161de | |||
48d10ea17c | |||
fdc3b96beb | |||
95dbeb25a0 | |||
9d91e907e5 | |||
3f81dc0173 | |||
8ff3488926 | |||
611699252e | |||
9ad31fddff | |||
e6d96d999a | |||
eaca9c85f1 | |||
bc2ca89088 | |||
a5ed8f0ef0 | |||
3b3267db9a | |||
d53c16cf3e | |||
66f397239b | |||
1fe2e36d5d | |||
ab44e32afb | |||
41f627091f | |||
1e01c9059c | |||
5f62cd8526 | |||
a6abc74500 | |||
5c674715ee | |||
484fcca557 | |||
802f9983c2 | |||
d26abc438d | |||
2162bf4272 | |||
c454f9612e | |||
71cd59f775 | |||
719ef61461 | |||
152b34b5f4 | |||
61b6c3ce83 | |||
0a78a786c1 | |||
e6e68786f7 | |||
58e7066b9f | |||
ed662077b3 | |||
9f9c20aa4a | |||
73ba9fd4c3 | |||
771be89103 | |||
f740557d96 | |||
aff74c72dd | |||
5d967f5b7b | |||
9b6de9c411 | |||
27e246e709 | |||
ff64d1b2e9 | |||
235a81a534 | |||
4759e1cf54 | |||
f4a123cd17 |
@ -10,33 +10,29 @@ from obidistutils.serenity.checksystem import is_mac_system
|
||||
|
||||
class build(ori_build):
|
||||
|
||||
def has_ctools(self):
|
||||
return self.distribution.has_ctools()
|
||||
|
||||
def has_files(self):
|
||||
return self.distribution.has_files()
|
||||
|
||||
def has_executables(self):
|
||||
return self.distribution.has_executables()
|
||||
|
||||
def has_ext_modules(self):
|
||||
return self.distribution.has_ext_modules()
|
||||
|
||||
def has_littlebigman(self):
|
||||
return True
|
||||
|
||||
def has_pidname(self):
|
||||
return is_mac_system()
|
||||
|
||||
def has_doc(self):
|
||||
return True
|
||||
|
||||
def has_littlebigman(self):
|
||||
return True
|
||||
|
||||
sub_commands = [('littlebigman', has_littlebigman),
|
||||
('pidname',has_pidname),
|
||||
('build_ctools', has_ctools),
|
||||
('build_files', has_files),
|
||||
('build_cexe', has_executables)] \
|
||||
+ ori_build.sub_commands + \
|
||||
[('build_sphinx',has_doc)]
|
||||
|
||||
try:
|
||||
from obidistutils.command.build_sphinx import build_sphinx # @UnusedImport
|
||||
|
||||
sub_commands = [("littlebigman",has_littlebigman),
|
||||
('pidname',has_pidname)
|
||||
] \
|
||||
+ ori_build.sub_commands + \
|
||||
[('build_sphinx',has_doc)]
|
||||
except ImportError:
|
||||
sub_commands = [("littlebigman",has_littlebigman),
|
||||
('pidname',has_pidname)
|
||||
] \
|
||||
+ ori_build.sub_commands
|
||||
|
||||
|
@ -4,10 +4,11 @@ Created on 20 oct. 2012
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
from obidistutils.command.build_ctools import build_ctools
|
||||
from .build_ctools import build_ctools
|
||||
from .build_exe import build_exe
|
||||
from distutils.errors import DistutilsSetupError
|
||||
from distutils import log
|
||||
|
||||
import os
|
||||
|
||||
class build_cexe(build_ctools):
|
||||
|
||||
@ -38,7 +39,9 @@ class build_cexe(build_ctools):
|
||||
self.set_undefined_options('build_files',
|
||||
('files', 'built_files'))
|
||||
|
||||
self.executables = self.distribution.executables
|
||||
self.executables = self.distribution.executables
|
||||
# self.build_cexe = os.path.join(os.path.dirname(self.build_cexe),'cbinaries')
|
||||
# self.mkpath(self.build_cexe)
|
||||
|
||||
if self.executables:
|
||||
self.check_executable_list(self.executables)
|
||||
@ -70,4 +73,13 @@ class build_cexe(build_ctools):
|
||||
log.info("%s ok",message)
|
||||
|
||||
return sources
|
||||
|
||||
|
||||
def run(self):
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
build_exe.run(self)
|
||||
|
||||
|
||||
|
||||
|
@ -5,7 +5,8 @@ Created on 20 oct. 2012
|
||||
'''
|
||||
|
||||
|
||||
from obidistutils.command.build_exe import build_exe
|
||||
from .build_exe import build_exe
|
||||
from distutils import log
|
||||
|
||||
class build_ctools(build_exe):
|
||||
description = "build C/C++ executable not distributed with Python extensions"
|
||||
@ -37,19 +38,26 @@ class build_ctools(build_exe):
|
||||
self.executables = self.distribution.ctools
|
||||
self.check_executable_list(self.executables)
|
||||
|
||||
|
||||
if self.littlebigman =='-DLITTLE_END':
|
||||
if self.define is None:
|
||||
self.define=[('LITTLE_END',None)]
|
||||
else:
|
||||
self.define.append('LITTLE_END',None)
|
||||
|
||||
log.info('Look for CPU architecture... %s',self.define)
|
||||
|
||||
self.ctools = set()
|
||||
|
||||
def run(self):
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
|
||||
build_exe.run(self)
|
||||
|
||||
for e,p in self.executables: # @UnusedVariable
|
||||
self.ctools.add(e)
|
||||
|
||||
|
||||
|
||||
|
@ -208,3 +208,4 @@ class build_exe(Command):
|
||||
output_dir=self.build_cexe,
|
||||
debug=self.debug)
|
||||
|
||||
|
||||
|
@ -8,12 +8,16 @@ from distutils import log
|
||||
import os
|
||||
|
||||
from Cython.Distutils import build_ext as ori_build_ext # @UnresolvedImport
|
||||
|
||||
|
||||
from Cython.Compiler import Options as cython_options # @UnresolvedImport
|
||||
|
||||
from distutils.errors import DistutilsSetupError
|
||||
|
||||
class build_ext(ori_build_ext):
|
||||
|
||||
|
||||
def modifyDocScripts(self):
|
||||
build_dir_file=open("doc/build_dir.txt","w")
|
||||
build_dir_file=open("doc/sphinx/build_dir.txt","w")
|
||||
print(self.build_lib,file=build_dir_file)
|
||||
build_dir_file.close()
|
||||
|
||||
@ -84,9 +88,25 @@ class build_ext(ori_build_ext):
|
||||
|
||||
def run(self):
|
||||
self.modifyDocScripts()
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
cython_options.annotate = True
|
||||
ori_build_ext.run(self) # @UndefinedVariable
|
||||
|
||||
|
||||
def has_files(self):
|
||||
return self.distribution.has_files()
|
||||
|
||||
def has_executables(self):
|
||||
return self.distribution.has_executables()
|
||||
|
||||
sub_commands = [('build_files',has_files),
|
||||
('build_cexe', has_executables)
|
||||
] + \
|
||||
ori_build_ext.sub_commands
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -28,6 +28,10 @@ class build_files(Command):
|
||||
self.files = {}
|
||||
|
||||
def run(self):
|
||||
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
|
||||
for dest,prog,command in self.distribution.files:
|
||||
destfile = os.path.join(self.build_temp,dest)
|
||||
@ -48,6 +52,12 @@ class build_files(Command):
|
||||
|
||||
log.info("Done.\n")
|
||||
|
||||
def has_ctools(self):
|
||||
return self.distribution.has_ctools()
|
||||
|
||||
|
||||
sub_commands = [('build_ctools', has_ctools)] + \
|
||||
Command.sub_commands
|
||||
|
||||
|
||||
|
||||
|
@ -6,14 +6,14 @@ Created on 20 oct. 2012
|
||||
|
||||
import os.path
|
||||
|
||||
from distutils.command.build_scripts import build_scripts as ori_build_scripts,\
|
||||
first_line_re
|
||||
from distutils.command.build_scripts import build_scripts as ori_build_scripts
|
||||
from distutils.util import convert_path
|
||||
from distutils import log, sysconfig
|
||||
from distutils.dep_util import newer
|
||||
from stat import ST_MODE
|
||||
import re
|
||||
|
||||
|
||||
first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
|
||||
|
||||
class build_scripts(ori_build_scripts):
|
||||
|
||||
|
@ -4,23 +4,24 @@ Created on 10 mars 2015
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
from sphinx.setup_command import BuildDoc as ori_build_sphinx # @UnresolvedImport
|
||||
try:
|
||||
from sphinx.setup_command import BuildDoc as ori_build_sphinx # @UnresolvedImport
|
||||
|
||||
class build_sphinx(ori_build_sphinx):
|
||||
'''Build Sphinx documentation in html, epub and man formats
|
||||
'''
|
||||
|
||||
class build_sphinx(ori_build_sphinx):
|
||||
'''
|
||||
Build Sphinx documentation in html, epub and man formats
|
||||
'''
|
||||
|
||||
description = __doc__
|
||||
|
||||
def run(self):
|
||||
self.builder='html'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='epub'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='man'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
|
||||
description = __doc__
|
||||
|
||||
def run(self):
|
||||
self.builder='html'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='epub'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
self.builder='man'
|
||||
self.finalize_options()
|
||||
ori_build_sphinx.run(self)
|
||||
except ImportError:
|
||||
pass
|
@ -4,11 +4,16 @@ Created on 6 oct. 2014
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
# try:
|
||||
# from setuptools.command.install import install as install_ori
|
||||
# except ImportError:
|
||||
# from distutils.command.install import install as install_ori
|
||||
|
||||
from distutils.command.install import install as install_ori
|
||||
|
||||
class install(install_ori):
|
||||
|
||||
def __init__(self,dist):
|
||||
install_ori.__init__(self, dist)
|
||||
self.sub_commands.insert(0, ('build',lambda self: True))
|
||||
# self.sub_commands.insert(0, ('build',lambda self: True))
|
||||
self.sub_commands.append(('install_sphinx',lambda self: self.distribution.serenity))
|
||||
|
@ -3,6 +3,12 @@ Created on 20 oct. 2012
|
||||
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
# try:
|
||||
# from setuptools.command.install_scripts import install_scripts as ori_install_scripts
|
||||
# except ImportError:
|
||||
# from distutils.command.install_scripts import install_scripts as ori_install_scripts
|
||||
|
||||
from distutils.command.install_scripts import install_scripts as ori_install_scripts
|
||||
|
||||
import os.path
|
||||
@ -18,12 +24,10 @@ class install_scripts(ori_install_scripts):
|
||||
def install_public_link(self):
|
||||
self.mkpath(self.public_dir)
|
||||
for file in self.get_outputs():
|
||||
if self.dry_run:
|
||||
log.info("changing mode of %s", file)
|
||||
else:
|
||||
log.info("exporting file %s -> %s", file,os.path.join(self.public_dir,
|
||||
os.path.split(file)[1]
|
||||
))
|
||||
log.info("exporting file %s -> %s", file,os.path.join(self.public_dir,
|
||||
os.path.split(file)[1]
|
||||
))
|
||||
if not self.dry_run:
|
||||
dest = os.path.join(self.public_dir,
|
||||
os.path.split(file)[1]
|
||||
)
|
||||
|
@ -43,4 +43,19 @@ class install_sphinx(Command):
|
||||
self.copy_file(os.path.join(epub),
|
||||
os.path.join(self.install_doc,os.path.split(epub)[1]))
|
||||
|
||||
def get_outputs(self):
|
||||
directory=os.path.join(self.install_doc,'html')
|
||||
files = [os.path.join(self.install_doc,'html', f)
|
||||
for dp, dn, filenames in os.walk(directory) for f in filenames] # @UnusedVariable
|
||||
|
||||
directory=os.path.join(self.build_dir,'man')
|
||||
files.append(os.path.join(self.install_doc,'man','man1', f)
|
||||
for dp, dn, filenames in os.walk(directory) for f in filenames) # @UnusedVariable
|
||||
|
||||
directory=os.path.join(self.build_dir,'epub')
|
||||
files.append(os.path.join(self.install_doc, f)
|
||||
for dp, dn, filenames in os.walk(directory) # @UnusedVariable
|
||||
for f in glob.glob(os.path.join(dp, '*.epub')) )
|
||||
|
||||
return files
|
||||
|
@ -49,7 +49,7 @@ class littlebigman(build_exe):
|
||||
shell=True,
|
||||
stdout=subprocess.PIPE)
|
||||
little = p.communicate()[0]
|
||||
return little
|
||||
return little.decode('latin1')
|
||||
|
||||
def run(self):
|
||||
build_exe.run(self)
|
||||
|
@ -41,6 +41,10 @@ class pidname(build_exe):
|
||||
else:
|
||||
self.executables = []
|
||||
|
||||
# self.build_cexe = os.path.join(os.path.dirname(self.build_cexe),'cbinaries')
|
||||
# self.mkpath(self.build_cexe)
|
||||
|
||||
|
||||
|
||||
def run(self):
|
||||
if is_mac_system():
|
||||
|
@ -9,8 +9,11 @@ import os.path
|
||||
import glob
|
||||
import sys
|
||||
|
||||
# try:
|
||||
# from setuptools.extension import Extension
|
||||
# except ImportError:
|
||||
# from distutils.extension import Extension
|
||||
|
||||
from distutils.core import setup as ori_setup
|
||||
from distutils.extension import Extension
|
||||
|
||||
from obidistutils.serenity.checkpackage import install_requirements,\
|
||||
@ -22,6 +25,7 @@ from obidistutils.serenity.rerun import rerun_with_anothe_python
|
||||
from distutils import log
|
||||
|
||||
from obidistutils.dist import Distribution
|
||||
from obidistutils.serenity import is_serenity
|
||||
|
||||
|
||||
def findPackage(root,base=None):
|
||||
@ -62,7 +66,10 @@ def findCython(root,base=None,pyrexs=None):
|
||||
cfiles = [x for x in cfiles if x[-2:]==".c"]
|
||||
pyrexs[-1].sources.extend(cfiles)
|
||||
pyrexs[-1].include_dirs.extend(incdir)
|
||||
pyrexs[-1].extra_compile_args.extend(['-msse2','-Wno-unused-function'])
|
||||
pyrexs[-1].extra_compile_args.extend(['-msse2',
|
||||
'-Wno-unused-function',
|
||||
'-Wmissing-braces',
|
||||
'-Wchar-subscripts'])
|
||||
|
||||
except IOError:
|
||||
pass
|
||||
@ -78,8 +85,8 @@ def rootname(x):
|
||||
def prepare_commands():
|
||||
from obidistutils.command.build import build
|
||||
from obidistutils.command.littlebigman import littlebigman
|
||||
# from obidistutils.command.serenity import serenity
|
||||
from obidistutils.command.build_cexe import build_cexe
|
||||
from obidistutils.command.build_sphinx import build_sphinx
|
||||
from obidistutils.command.build_ext import build_ext
|
||||
from obidistutils.command.build_ctools import build_ctools
|
||||
from obidistutils.command.build_files import build_files
|
||||
@ -89,8 +96,11 @@ def prepare_commands():
|
||||
from obidistutils.command.install import install
|
||||
from obidistutils.command.pidname import pidname
|
||||
from obidistutils.command.sdist import sdist
|
||||
|
||||
|
||||
|
||||
|
||||
COMMANDS = {'build':build,
|
||||
# 'serenity':serenity,
|
||||
'littlebigman':littlebigman,
|
||||
'pidname':pidname,
|
||||
'build_ctools':build_ctools,
|
||||
@ -98,12 +108,22 @@ def prepare_commands():
|
||||
'build_cexe':build_cexe,
|
||||
'build_ext': build_ext,
|
||||
'build_scripts':build_scripts,
|
||||
'build_sphinx':build_sphinx,
|
||||
'install_scripts':install_scripts,
|
||||
'install_sphinx':install_sphinx,
|
||||
'install':install,
|
||||
'sdist':sdist}
|
||||
|
||||
# try:
|
||||
# from setuptools.commands import egg_info
|
||||
# COMMANDS['egg_info']=egg_info
|
||||
# except ImportError:
|
||||
# pass
|
||||
try:
|
||||
from obidistutils.command.build_sphinx import build_sphinx
|
||||
COMMANDS['build_sphinx']=build_sphinx
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return COMMANDS
|
||||
|
||||
|
||||
@ -139,19 +159,21 @@ def setup(**attrs):
|
||||
del attrs['requirements']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
enforce_good_python(minversion, maxversion, fork)
|
||||
if is_serenity():
|
||||
|
||||
if (install_requirements(requirementfile)):
|
||||
rerun_with_anothe_python(sys.executable,minversion,maxversion,fork)
|
||||
|
||||
enforce_good_python(minversion, maxversion, fork)
|
||||
|
||||
if (install_requirements(requirementfile)):
|
||||
rerun_with_anothe_python(sys.executable,minversion,maxversion,fork)
|
||||
|
||||
|
||||
try:
|
||||
check_requirements(requirementfile)
|
||||
except RequirementError as e :
|
||||
log.error(e)
|
||||
sys.exit(1)
|
||||
try:
|
||||
check_requirements(requirementfile)
|
||||
except RequirementError as e :
|
||||
log.error(e)
|
||||
sys.exit(1)
|
||||
|
||||
if 'distclass' not in attrs:
|
||||
attrs['distclass']=Distribution
|
||||
@ -193,5 +215,12 @@ def setup(**attrs):
|
||||
|
||||
if 'ext_modules' not in attrs:
|
||||
attrs['ext_modules'] = EXTENTION
|
||||
|
||||
# try:
|
||||
# from setuptools.core import setup as ori_setup
|
||||
# except ImportError:
|
||||
# from distutils.core import setup as ori_setup
|
||||
|
||||
from distutils.core import setup as ori_setup
|
||||
|
||||
ori_setup(**attrs)
|
||||
|
@ -4,6 +4,11 @@ Created on 20 oct. 2012
|
||||
@author: coissac
|
||||
'''
|
||||
|
||||
# try:
|
||||
# from setuptools.dist import Distribution as ori_Distribution
|
||||
# except ImportError:
|
||||
# from distutils.dist import Distribution as ori_Distribution
|
||||
|
||||
from distutils.dist import Distribution as ori_Distribution
|
||||
|
||||
class Distribution(ori_Distribution):
|
||||
@ -29,6 +34,14 @@ class Distribution(ori_Distribution):
|
||||
"By default the name is PACKAGE-VERSION"
|
||||
))
|
||||
|
||||
def run_commands(self):
|
||||
"""Run each command that was seen on the setup script command line.
|
||||
Uses the list of commands found and cache of command objects
|
||||
created by 'get_command_obj()'.
|
||||
"""
|
||||
# self.run_command('littlebigman')
|
||||
ori_Distribution.run_commands(self)
|
||||
|
||||
|
||||
def has_executables(self):
|
||||
return self.executables is not None and self.executables
|
||||
|
@ -11,8 +11,7 @@ import re
|
||||
from distutils.errors import DistutilsError
|
||||
import tempfile
|
||||
|
||||
import importlib
|
||||
import imp
|
||||
from importlib.util import spec_from_file_location # @UnresolvedImport
|
||||
import zipimport
|
||||
|
||||
import argparse
|
||||
@ -106,3 +105,8 @@ def serenity_mode(package,version):
|
||||
return args.serenity
|
||||
|
||||
|
||||
def getVersion(source,main,version):
|
||||
path = os.path.join(source,main,'%s.py' % version)
|
||||
spec = spec_from_file_location('version',path)
|
||||
return spec.loader.load_module().version.strip()
|
||||
|
||||
|
@ -83,6 +83,8 @@ def install_requirements(requirementfile='requirements.txt'):
|
||||
log.info(" Installing requirement : %s" % x)
|
||||
pip_install_package(x)
|
||||
install_something=True
|
||||
if x[0:3]=='pip':
|
||||
return True
|
||||
|
||||
return install_something
|
||||
|
||||
@ -134,7 +136,9 @@ def get_package_requirement(package,requirementfile='requirements.txt'):
|
||||
|
||||
def pip_install_package(package,directory=None,upgrade=True):
|
||||
|
||||
log.info('installing %s in directory %s' % (package,str(directory)))
|
||||
if directory is not None:
|
||||
log.info(' installing %s in directory %s' % (package,str(directory)))
|
||||
|
||||
|
||||
if 'http_proxy' in os.environ and 'https_proxy' not in os.environ:
|
||||
os.environ['https_proxy']=os.environ['http_proxy']
|
||||
@ -144,8 +148,8 @@ def pip_install_package(package,directory=None,upgrade=True):
|
||||
if upgrade:
|
||||
args.append('--upgrade')
|
||||
|
||||
if 'http_proxy' in os.environ:
|
||||
args.append('--proxy=%s' % os.environ['http_proxy'])
|
||||
if 'https_proxy' in os.environ:
|
||||
args.append('--proxy=%s' % os.environ['https_proxy'])
|
||||
|
||||
if directory is not None:
|
||||
args.append('--target=%s' % directory)
|
||||
|
@ -37,7 +37,7 @@ def is_python_version(path=None,minversion='3.4',maxversion=None):
|
||||
stdout=subprocess.PIPE)
|
||||
pythonversion=str(p.communicate()[0],'utf8').strip()
|
||||
pythonversion = StrictVersion(pythonversion)
|
||||
|
||||
|
||||
return ( pythonversion >=StrictVersion(minversion)
|
||||
and ( maxversion is None
|
||||
or pythonversion < StrictVersion(maxversion))
|
||||
@ -91,9 +91,9 @@ def is_a_virtualenv_python(path=None):
|
||||
|
||||
'''
|
||||
if path is None:
|
||||
rep = sys.base_exec_prefix == sys.exec_prefix
|
||||
rep = sys.base_exec_prefix != sys.exec_prefix
|
||||
else:
|
||||
command = """'%s' -c 'import sys; print(sys.base_exec_prefix == sys.exec_prefix)'""" % path
|
||||
command = """'%s' -c 'import sys; print(sys.base_exec_prefix != sys.exec_prefix)'""" % path
|
||||
p = subprocess.Popen(command,
|
||||
shell=True,
|
||||
stdout=subprocess.PIPE)
|
||||
|
@ -9,10 +9,10 @@ import sys
|
||||
import venv
|
||||
|
||||
from distutils.errors import DistutilsError
|
||||
from obidistutils.serenity.globals import local_virtualenv # @UnusedImport
|
||||
from obidistutils.serenity.checkpython import which_virtualenv,\
|
||||
is_python_version, \
|
||||
is_a_virtualenv_python
|
||||
from .globals import local_virtualenv # @UnusedImport
|
||||
from .checkpython import which_virtualenv,\
|
||||
is_python_version, \
|
||||
is_a_virtualenv_python
|
||||
|
||||
|
||||
|
||||
@ -39,6 +39,7 @@ def serenity_virtualenv(envname,package,version,minversion='3.4',maxversion=None
|
||||
maxversion=maxversion) and
|
||||
is_a_virtualenv_python(python))
|
||||
|
||||
|
||||
#
|
||||
# The virtualenv already exist but it is not ok
|
||||
#
|
||||
|
2
doc/.gitignore
vendored
2
doc/.gitignore
vendored
@ -1,3 +1,5 @@
|
||||
/build/
|
||||
/doxygen/
|
||||
/build_dir.txt
|
||||
/.DS_Store
|
||||
/.gitignore
|
||||
|
@ -57,7 +57,7 @@ html:
|
||||
@echo "Generating Doxygen documentation..."
|
||||
doxygen Doxyfile
|
||||
@echo "Doxygen documentation generated. \n"
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
$(SPHINXBUILD) -b html -c ./ $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
|
@ -1,16 +0,0 @@
|
||||
|
||||
__version__ = '4.0.0'
|
||||
|
||||
|
||||
def setup(app):
|
||||
|
||||
# We can't do the import at the module scope as setup.py has to be able to
|
||||
# import this file to read __version__ without hitting any syntax errors
|
||||
# from both Python 2 & Python 3.
|
||||
|
||||
# By the time this function is called, the directives code will have been
|
||||
# converted with 2to3 if appropriate
|
||||
|
||||
from . import directives
|
||||
|
||||
directives.setup(app)
|
Binary file not shown.
Binary file not shown.
@ -1,88 +0,0 @@
|
||||
|
||||
from ..renderer.rst.doxygen.base import RenderContext
|
||||
from ..renderer.rst.doxygen import format_parser_error
|
||||
from ..parser import ParserError, FileIOError
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.parsers import rst
|
||||
|
||||
|
||||
class WarningHandler(object):
|
||||
|
||||
def __init__(self, state, context):
|
||||
self.state = state
|
||||
self.context = context
|
||||
|
||||
def warn(self, raw_text, rendered_nodes=None):
|
||||
raw_text = self.format(raw_text)
|
||||
if rendered_nodes is None:
|
||||
rendered_nodes = [nodes.paragraph("", "", nodes.Text(raw_text))]
|
||||
return [
|
||||
nodes.warning("", *rendered_nodes),
|
||||
self.state.document.reporter.warning(raw_text, line=self.context['lineno'])
|
||||
]
|
||||
|
||||
def format(self, text):
|
||||
return text.format(**self.context)
|
||||
|
||||
|
||||
def create_warning(project_info, state, lineno, **kwargs):
|
||||
|
||||
tail = ''
|
||||
if project_info:
|
||||
tail = 'in doxygen xml output for project "{project}" from directory: {path}'.format(
|
||||
project=project_info.name(),
|
||||
path=project_info.project_path()
|
||||
)
|
||||
|
||||
context = dict(
|
||||
lineno=lineno,
|
||||
tail=tail,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
return WarningHandler(state, context)
|
||||
|
||||
|
||||
class BaseDirective(rst.Directive):
|
||||
|
||||
def __init__(self, root_data_object, renderer_factory_creator_constructor, finder_factory,
|
||||
project_info_factory, filter_factory, target_handler_factory, parser_factory, *args):
|
||||
rst.Directive.__init__(self, *args)
|
||||
self.directive_args = list(args) # Convert tuple to list to allow modification.
|
||||
|
||||
self.root_data_object = root_data_object
|
||||
self.renderer_factory_creator_constructor = renderer_factory_creator_constructor
|
||||
self.finder_factory = finder_factory
|
||||
self.project_info_factory = project_info_factory
|
||||
self.filter_factory = filter_factory
|
||||
self.target_handler_factory = target_handler_factory
|
||||
self.parser_factory = parser_factory
|
||||
|
||||
def render(self, node_stack, project_info, options, filter_, target_handler, mask_factory):
|
||||
"Standard render process used by subclasses"
|
||||
|
||||
renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
|
||||
project_info,
|
||||
self.state.document,
|
||||
options,
|
||||
target_handler
|
||||
)
|
||||
|
||||
try:
|
||||
renderer_factory = renderer_factory_creator.create_factory(
|
||||
node_stack,
|
||||
self.state,
|
||||
self.state.document,
|
||||
filter_,
|
||||
target_handler,
|
||||
)
|
||||
except ParserError as e:
|
||||
return format_parser_error("doxygenclass", e.error, e.filename, self.state,
|
||||
self.lineno, True)
|
||||
except FileIOError as e:
|
||||
return format_parser_error("doxygenclass", e.error, e.filename, self.state, self.lineno)
|
||||
|
||||
context = RenderContext(node_stack, mask_factory, self.directive_args)
|
||||
object_renderer = renderer_factory.create_renderer(context)
|
||||
return object_renderer.render()
|
Binary file not shown.
@ -1,123 +0,0 @@
|
||||
|
||||
from ..renderer.rst.doxygen.base import RenderContext
|
||||
from ..renderer.rst.doxygen.mask import NullMaskFactory
|
||||
from ..directive.base import BaseDirective
|
||||
from ..project import ProjectError
|
||||
from .base import create_warning
|
||||
|
||||
from docutils.parsers.rst.directives import unchanged_required, flag
|
||||
from docutils.parsers import rst
|
||||
|
||||
|
||||
class BaseFileDirective(BaseDirective):
|
||||
"""Base class handle the main work when given the appropriate file and project info to work
|
||||
from.
|
||||
"""
|
||||
|
||||
# We use inheritance here rather than a separate object and composition, because so much
|
||||
# information is present in the Directive class from the docutils framework that we'd have to
|
||||
# pass way too much stuff to a helper object to be reasonable.
|
||||
|
||||
def handle_contents(self, file_, project_info):
|
||||
|
||||
finder = self.finder_factory.create_finder(project_info)
|
||||
|
||||
finder_filter = self.filter_factory.create_file_finder_filter(file_)
|
||||
|
||||
matches = []
|
||||
finder.filter_(finder_filter, matches)
|
||||
|
||||
if len(matches) > 1:
|
||||
warning = create_warning(None, self.state, self.lineno, file=file_,
|
||||
directivename=self.directive_name)
|
||||
return warning.warn('{directivename}: Found multiple matches for file "{file} {tail}')
|
||||
|
||||
elif not matches:
|
||||
warning = create_warning(None, self.state, self.lineno, file=file_,
|
||||
directivename=self.directive_name)
|
||||
return warning.warn('{directivename}: Cannot find file "{file} {tail}')
|
||||
|
||||
target_handler = self.target_handler_factory.create_target_handler(
|
||||
self.options, project_info, self.state.document)
|
||||
filter_ = self.filter_factory.create_file_filter(file_, self.options)
|
||||
|
||||
renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
|
||||
project_info,
|
||||
self.state.document,
|
||||
self.options,
|
||||
target_handler
|
||||
)
|
||||
node_list = []
|
||||
for node_stack in matches:
|
||||
|
||||
renderer_factory = renderer_factory_creator.create_factory(
|
||||
node_stack,
|
||||
self.state,
|
||||
self.state.document,
|
||||
filter_,
|
||||
target_handler,
|
||||
)
|
||||
|
||||
mask_factory = NullMaskFactory()
|
||||
context = RenderContext(node_stack, mask_factory, self.directive_args)
|
||||
object_renderer = renderer_factory.create_renderer(context)
|
||||
node_list.extend(object_renderer.render())
|
||||
|
||||
return node_list
|
||||
|
||||
|
||||
class DoxygenFileDirective(BaseFileDirective):
|
||||
|
||||
directive_name = 'doxygenfile'
|
||||
|
||||
required_arguments = 0
|
||||
optional_arguments = 2
|
||||
option_spec = {
|
||||
"path": unchanged_required,
|
||||
"project": unchanged_required,
|
||||
"outline": flag,
|
||||
"no-link": flag,
|
||||
}
|
||||
has_content = False
|
||||
|
||||
def run(self):
|
||||
"""Get the file from the argument and the project info from the factory."""
|
||||
|
||||
file_ = self.arguments[0]
|
||||
|
||||
try:
|
||||
project_info = self.project_info_factory.create_project_info(self.options)
|
||||
except ProjectError as e:
|
||||
warning = create_warning(None, self.state, self.lineno)
|
||||
return warning.warn('doxygenfile: %s' % e)
|
||||
|
||||
return self.handle_contents(file_, project_info)
|
||||
|
||||
|
||||
class AutoDoxygenFileDirective(BaseFileDirective):
|
||||
|
||||
directive_name = 'autodoxygenfile'
|
||||
|
||||
required_arguments = 1
|
||||
option_spec = {
|
||||
"project": unchanged_required,
|
||||
"outline": flag,
|
||||
"no-link": flag,
|
||||
}
|
||||
has_content = False
|
||||
|
||||
def run(self):
|
||||
"""Get the file from the argument and extract the associated project info for the named
|
||||
project given that it is an auto-project.
|
||||
"""
|
||||
|
||||
file_ = self.arguments[0]
|
||||
|
||||
try:
|
||||
project_info = self.project_info_factory.retrieve_project_info_for_auto(self.options)
|
||||
except ProjectError as e:
|
||||
warning = create_warning(None, self.state, self.lineno)
|
||||
return warning.warn('autodoxygenfile: %s' % e)
|
||||
|
||||
return self.handle_contents(file_, project_info)
|
||||
|
Binary file not shown.
@ -1,115 +0,0 @@
|
||||
|
||||
from ..renderer.rst.doxygen.base import RenderContext
|
||||
from ..renderer.rst.doxygen.mask import NullMaskFactory
|
||||
from ..renderer.rst.doxygen import format_parser_error
|
||||
from ..directive.base import BaseDirective
|
||||
from ..project import ProjectError
|
||||
from ..parser import ParserError, FileIOError
|
||||
from .base import create_warning
|
||||
|
||||
from docutils.parsers import rst
|
||||
from docutils.parsers.rst.directives import unchanged_required, flag
|
||||
|
||||
|
||||
class BaseIndexDirective(BaseDirective):
|
||||
"""Base class handle the main work when given the appropriate project info to work from.
|
||||
"""
|
||||
|
||||
# We use inheritance here rather than a separate object and composition, because so much
|
||||
# information is present in the Directive class from the docutils framework that we'd have to
|
||||
# pass way too much stuff to a helper object to be reasonable.
|
||||
|
||||
def handle_contents(self, project_info):
|
||||
|
||||
try:
|
||||
finder = self.finder_factory.create_finder(project_info)
|
||||
except ParserError as e:
|
||||
return format_parser_error(self.name, e.error, e.filename, self.state,
|
||||
self.lineno, True)
|
||||
except FileIOError as e:
|
||||
return format_parser_error(self.name, e.error, e.filename, self.state, self.lineno)
|
||||
|
||||
data_object = finder.root()
|
||||
|
||||
target_handler = self.target_handler_factory.create_target_handler(
|
||||
self.options, project_info, self.state.document)
|
||||
filter_ = self.filter_factory.create_index_filter(self.options)
|
||||
|
||||
renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
|
||||
project_info,
|
||||
self.state.document,
|
||||
self.options,
|
||||
target_handler
|
||||
)
|
||||
renderer_factory = renderer_factory_creator.create_factory(
|
||||
[data_object],
|
||||
self.state,
|
||||
self.state.document,
|
||||
filter_,
|
||||
target_handler,
|
||||
)
|
||||
|
||||
mask_factory = NullMaskFactory()
|
||||
context = RenderContext([data_object, self.root_data_object], mask_factory, self.directive_args)
|
||||
object_renderer = renderer_factory.create_renderer(context)
|
||||
|
||||
try:
|
||||
node_list = object_renderer.render()
|
||||
except ParserError as e:
|
||||
return format_parser_error(self.name, e.error, e.filename, self.state,
|
||||
self.lineno, True)
|
||||
except FileIOError as e:
|
||||
return format_parser_error(self.name, e.error, e.filename, self.state, self.lineno)
|
||||
|
||||
return node_list
|
||||
|
||||
|
||||
class DoxygenIndexDirective(BaseIndexDirective):
|
||||
|
||||
required_arguments = 0
|
||||
optional_arguments = 2
|
||||
option_spec = {
|
||||
"path": unchanged_required,
|
||||
"project": unchanged_required,
|
||||
"outline": flag,
|
||||
"no-link": flag,
|
||||
}
|
||||
has_content = False
|
||||
|
||||
def run(self):
|
||||
"""Extract the project info and pass it to the helper method"""
|
||||
|
||||
try:
|
||||
project_info = self.project_info_factory.create_project_info(self.options)
|
||||
except ProjectError as e:
|
||||
warning = create_warning(None, self.state, self.lineno)
|
||||
return warning.warn('doxygenindex: %s' % e)
|
||||
|
||||
return self.handle_contents(project_info)
|
||||
|
||||
|
||||
class AutoDoxygenIndexDirective(BaseIndexDirective):
|
||||
|
||||
required_arguments = 0
|
||||
final_argument_whitespace = True
|
||||
option_spec = {
|
||||
"project": unchanged_required,
|
||||
"outline": flag,
|
||||
"no-link": flag,
|
||||
}
|
||||
has_content = False
|
||||
|
||||
def run(self):
|
||||
"""Extract the project info from the auto project info store and pass it to the helper
|
||||
method
|
||||
"""
|
||||
|
||||
try:
|
||||
project_info = self.project_info_factory.retrieve_project_info_for_auto(self.options)
|
||||
except ProjectError as e:
|
||||
warning = create_warning(None, self.state, self.lineno)
|
||||
return warning.warn('autodoxygenindex: %s' % e)
|
||||
|
||||
return self.handle_contents(project_info)
|
||||
|
||||
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@ -1,3 +0,0 @@
|
||||
|
||||
class BreatheError(Exception):
|
||||
pass
|
Binary file not shown.
Binary file not shown.
@ -1,45 +0,0 @@
|
||||
|
||||
class FakeParentNode(object):
|
||||
|
||||
node_type = "fakeparent"
|
||||
|
||||
|
||||
class Finder(object):
|
||||
|
||||
def __init__(self, root, item_finder_factory):
|
||||
|
||||
self._root = root
|
||||
self.item_finder_factory = item_finder_factory
|
||||
|
||||
def filter_(self, filter_, matches):
|
||||
"""Adds all nodes which match the filter into the matches list"""
|
||||
|
||||
item_finder = self.item_finder_factory.create_finder(self._root)
|
||||
item_finder.filter_([FakeParentNode()], filter_, matches)
|
||||
|
||||
def root(self):
|
||||
|
||||
return self._root
|
||||
|
||||
|
||||
class FinderFactory(object):
|
||||
|
||||
def __init__(self, parser, item_finder_factory_creator):
|
||||
|
||||
self.parser = parser
|
||||
self.item_finder_factory_creator = item_finder_factory_creator
|
||||
|
||||
def create_finder(self, project_info):
|
||||
|
||||
root = self.parser.parse(project_info)
|
||||
item_finder_factory = self.item_finder_factory_creator.create_factory(project_info)
|
||||
|
||||
return Finder(root, item_finder_factory)
|
||||
|
||||
def create_finder_from_root(self, root, project_info):
|
||||
|
||||
item_finder_factory = self.item_finder_factory_creator.create_factory(project_info)
|
||||
|
||||
return Finder(root, item_finder_factory)
|
||||
|
||||
|
Binary file not shown.
@ -1 +0,0 @@
|
||||
|
Binary file not shown.
@ -1,18 +0,0 @@
|
||||
|
||||
class ItemFinder(object):
|
||||
|
||||
def __init__(self, project_info, data_object, item_finder_factory):
|
||||
|
||||
self.data_object = data_object
|
||||
self.item_finder_factory = item_finder_factory
|
||||
self.project_info = project_info
|
||||
|
||||
|
||||
def stack(element, list_):
|
||||
"""Stack an element on to the start of a list and return as a new list"""
|
||||
|
||||
# Copy list first so we have a new list to insert into
|
||||
output = list_[:]
|
||||
output.insert(0, element)
|
||||
return output
|
||||
|
Binary file not shown.
@ -1,75 +0,0 @@
|
||||
|
||||
from .base import ItemFinder, stack
|
||||
|
||||
|
||||
class DoxygenTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
"""Find nodes which match the filter. Doesn't test this node, only its children"""
|
||||
|
||||
node_stack = stack(self.data_object, ancestors)
|
||||
|
||||
compound_finder = self.item_finder_factory.create_finder(self.data_object.compounddef)
|
||||
compound_finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class CompoundDefTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
"""Finds nodes which match the filter and continues checks to children"""
|
||||
|
||||
node_stack = stack(self.data_object, ancestors)
|
||||
|
||||
if filter_.allow(node_stack):
|
||||
matches.append(node_stack)
|
||||
|
||||
for sectiondef in self.data_object.sectiondef:
|
||||
finder = self.item_finder_factory.create_finder(sectiondef)
|
||||
finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
for innerclass in self.data_object.innerclass:
|
||||
finder = self.item_finder_factory.create_finder(innerclass)
|
||||
finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class SectionDefTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
"""Find nodes which match the filter. Doesn't test this node, only its children"""
|
||||
|
||||
node_stack = stack(self.data_object, ancestors)
|
||||
|
||||
if filter_.allow(node_stack):
|
||||
matches.append(node_stack)
|
||||
|
||||
for memberdef in self.data_object.memberdef:
|
||||
finder = self.item_finder_factory.create_finder(memberdef)
|
||||
finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class MemberDefTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
|
||||
data_object = self.data_object
|
||||
node_stack = stack(data_object, ancestors)
|
||||
|
||||
if filter_.allow(node_stack):
|
||||
matches.append(node_stack)
|
||||
|
||||
if data_object.kind == 'enum':
|
||||
for value in data_object.enumvalue:
|
||||
value_stack = stack(value, node_stack)
|
||||
if filter_.allow(value_stack):
|
||||
matches.append(value_stack)
|
||||
|
||||
|
||||
class RefTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
|
||||
node_stack = stack(self.data_object, ancestors)
|
||||
|
||||
if filter_.allow(node_stack):
|
||||
matches.append(node_stack)
|
||||
|
Binary file not shown.
@ -1,55 +0,0 @@
|
||||
|
||||
from . import index as indexfinder
|
||||
from . import compound as compoundfinder
|
||||
|
||||
|
||||
class CreateCompoundTypeSubFinder(object):
|
||||
|
||||
def __init__(self, parser_factory, matcher_factory):
|
||||
|
||||
self.parser_factory = parser_factory
|
||||
self.matcher_factory = matcher_factory
|
||||
|
||||
def __call__(self, project_info, *args):
|
||||
|
||||
compound_parser = self.parser_factory.create_compound_parser(project_info)
|
||||
return indexfinder.CompoundTypeSubItemFinder(self.matcher_factory, compound_parser,
|
||||
project_info, *args)
|
||||
|
||||
|
||||
class DoxygenItemFinderFactory(object):
|
||||
|
||||
def __init__(self, finders, project_info):
|
||||
|
||||
self.finders = finders
|
||||
self.project_info = project_info
|
||||
|
||||
def create_finder(self, data_object):
|
||||
|
||||
return self.finders[data_object.node_type](self.project_info, data_object, self)
|
||||
|
||||
|
||||
class DoxygenItemFinderFactoryCreator(object):
|
||||
|
||||
def __init__(self, parser_factory, filter_factory):
|
||||
|
||||
self.parser_factory = parser_factory
|
||||
self.filter_factory = filter_factory
|
||||
|
||||
def create_factory(self, project_info):
|
||||
|
||||
finders = {
|
||||
"doxygen": indexfinder.DoxygenTypeSubItemFinder,
|
||||
"compound": CreateCompoundTypeSubFinder(self.parser_factory, self.filter_factory),
|
||||
"member": indexfinder.MemberTypeSubItemFinder,
|
||||
"doxygendef": compoundfinder.DoxygenTypeSubItemFinder,
|
||||
"compounddef": compoundfinder.CompoundDefTypeSubItemFinder,
|
||||
"sectiondef": compoundfinder.SectionDefTypeSubItemFinder,
|
||||
"memberdef": compoundfinder.MemberDefTypeSubItemFinder,
|
||||
"ref": compoundfinder.RefTypeSubItemFinder,
|
||||
}
|
||||
|
||||
return DoxygenItemFinderFactory(finders, project_info)
|
||||
|
||||
|
||||
|
Binary file not shown.
@ -1,79 +0,0 @@
|
||||
|
||||
from .base import ItemFinder, stack
|
||||
|
||||
|
||||
class DoxygenTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
"""Find nodes which match the filter. Doesn't test this node, only its children"""
|
||||
|
||||
compounds = self.data_object.get_compound()
|
||||
|
||||
node_stack = stack(self.data_object, ancestors)
|
||||
|
||||
for compound in compounds:
|
||||
|
||||
compound_finder = self.item_finder_factory.create_finder(compound)
|
||||
compound_finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class CompoundTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def __init__(self, filter_factory, compound_parser, *args):
|
||||
ItemFinder.__init__(self, *args)
|
||||
|
||||
self.filter_factory = filter_factory
|
||||
self.compound_parser = compound_parser
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
"""Finds nodes which match the filter and continues checks to children
|
||||
|
||||
Requires parsing the xml files referenced by the children for which we use the compound
|
||||
parser and continue at the top level of that pretending that this node is the parent of the
|
||||
top level node of the compound file.
|
||||
"""
|
||||
|
||||
node_stack = stack(self.data_object, ancestors)
|
||||
|
||||
# Match against compound object
|
||||
if filter_.allow(node_stack):
|
||||
matches.append(node_stack)
|
||||
|
||||
# Descend to member children
|
||||
members = self.data_object.get_member()
|
||||
member_matches = []
|
||||
for member in members:
|
||||
member_finder = self.item_finder_factory.create_finder(member)
|
||||
member_finder.filter_(node_stack, filter_, member_matches)
|
||||
|
||||
results = []
|
||||
|
||||
# If there are members in this compound that match the criteria
|
||||
# then load up the file for this compound and get the member data objects
|
||||
if member_matches:
|
||||
|
||||
file_data = self.compound_parser.parse(self.data_object.refid)
|
||||
finder = self.item_finder_factory.create_finder(file_data)
|
||||
|
||||
for member_stack in member_matches:
|
||||
ref_filter = self.filter_factory.create_id_filter('memberdef', member_stack[0].refid)
|
||||
finder.filter_(node_stack, ref_filter, matches)
|
||||
|
||||
else:
|
||||
|
||||
# Read in the xml file referenced by the compound and descend into that as well
|
||||
file_data = self.compound_parser.parse(self.data_object.refid)
|
||||
finder = self.item_finder_factory.create_finder(file_data)
|
||||
|
||||
finder.filter_(node_stack, filter_, matches)
|
||||
|
||||
|
||||
class MemberTypeSubItemFinder(ItemFinder):
|
||||
|
||||
def filter_(self, ancestors, filter_, matches):
|
||||
|
||||
node_stack = stack(self.data_object, ancestors)
|
||||
|
||||
# Match against member object
|
||||
if filter_.allow(node_stack):
|
||||
matches.append(node_stack)
|
Binary file not shown.
@ -1,118 +0,0 @@
|
||||
|
||||
import breathe.parser.doxygen.index
|
||||
import breathe.parser.doxygen.compound
|
||||
|
||||
class ParserError(Exception):
|
||||
|
||||
def __init__(self, error, filename):
|
||||
Exception.__init__(self, error)
|
||||
|
||||
self.error = error
|
||||
self.filename = filename
|
||||
|
||||
class FileIOError(Exception):
|
||||
|
||||
def __init__(self, error, filename):
|
||||
Exception.__init__(self, error)
|
||||
|
||||
self.error = error
|
||||
self.filename = filename
|
||||
|
||||
class Parser(object):
|
||||
|
||||
def __init__(self, cache, path_handler, file_state_cache):
|
||||
|
||||
self.cache = cache
|
||||
self.path_handler = path_handler
|
||||
self.file_state_cache = file_state_cache
|
||||
|
||||
class DoxygenIndexParser(Parser):
|
||||
|
||||
def __init__(self, cache, path_handler, file_state_cache):
|
||||
Parser.__init__(self, cache, path_handler, file_state_cache)
|
||||
|
||||
def parse(self, project_info):
|
||||
|
||||
filename = self.path_handler.resolve_path(
|
||||
project_info.project_path(),
|
||||
"index.xml"
|
||||
)
|
||||
|
||||
self.file_state_cache.update(filename)
|
||||
|
||||
try:
|
||||
# Try to get from our cache
|
||||
return self.cache[filename]
|
||||
except KeyError:
|
||||
|
||||
# If that fails, parse it afresh
|
||||
try:
|
||||
result = breathe.parser.doxygen.index.parse(filename)
|
||||
self.cache[filename] = result
|
||||
return result
|
||||
except breathe.parser.doxygen.index.ParseError as e:
|
||||
raise ParserError(e, filename)
|
||||
except breathe.parser.doxygen.index.FileIOError as e:
|
||||
raise FileIOError(e, filename)
|
||||
|
||||
class DoxygenCompoundParser(Parser):
|
||||
|
||||
def __init__(self, cache, path_handler, file_state_cache, project_info):
|
||||
Parser.__init__(self, cache, path_handler, file_state_cache)
|
||||
|
||||
self.project_info = project_info
|
||||
|
||||
def parse(self, refid):
|
||||
|
||||
filename = self.path_handler.resolve_path(
|
||||
self.project_info.project_path(),
|
||||
"%s.xml" % refid
|
||||
)
|
||||
|
||||
self.file_state_cache.update(filename)
|
||||
|
||||
try:
|
||||
# Try to get from our cache
|
||||
return self.cache[filename]
|
||||
except KeyError:
|
||||
|
||||
# If that fails, parse it afresh
|
||||
try:
|
||||
result = breathe.parser.doxygen.compound.parse(filename)
|
||||
self.cache[filename] = result
|
||||
return result
|
||||
except breathe.parser.doxygen.compound.ParseError as e:
|
||||
raise ParserError(e, filename)
|
||||
except breathe.parser.doxygen.compound.FileIOError as e:
|
||||
raise FileIOError(e, filename)
|
||||
|
||||
class CacheFactory(object):
|
||||
|
||||
def create_cache(self):
|
||||
|
||||
# Return basic dictionary as cache
|
||||
return {}
|
||||
|
||||
class DoxygenParserFactory(object):
|
||||
|
||||
def __init__(self, cache, path_handler, file_state_cache):
|
||||
|
||||
self.cache = cache
|
||||
self.path_handler = path_handler
|
||||
self.file_state_cache = file_state_cache
|
||||
|
||||
def create_index_parser(self):
|
||||
|
||||
return DoxygenIndexParser(self.cache, self.path_handler, self.file_state_cache)
|
||||
|
||||
def create_compound_parser(self, project_info):
|
||||
|
||||
return DoxygenCompoundParser(
|
||||
self.cache,
|
||||
self.path_handler,
|
||||
self.file_state_cache,
|
||||
project_info
|
||||
)
|
||||
|
||||
|
||||
|
Binary file not shown.
Binary file not shown.
@ -1,964 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Generated Mon Feb 9 19:08:05 2009 by generateDS.py.
|
||||
"""
|
||||
|
||||
from xml.dom import minidom
|
||||
from xml.dom import Node
|
||||
from xml.parsers.expat import ExpatError
|
||||
|
||||
from . import compoundsuper as supermod
|
||||
from .compoundsuper import MixedContainer
|
||||
|
||||
|
||||
class DoxygenTypeSub(supermod.DoxygenType):
|
||||
|
||||
node_type = "doxygendef"
|
||||
|
||||
def __init__(self, version=None, compounddef=None):
|
||||
supermod.DoxygenType.__init__(self, version, compounddef)
|
||||
supermod.DoxygenType.subclass = DoxygenTypeSub
|
||||
# end class DoxygenTypeSub
|
||||
|
||||
|
||||
class compounddefTypeSub(supermod.compounddefType):
|
||||
|
||||
node_type = "compounddef"
|
||||
|
||||
def __init__(self, kind=None, prot=None, id=None, compoundname='', title='',
|
||||
basecompoundref=None, derivedcompoundref=None, includes=None, includedby=None,
|
||||
incdepgraph=None, invincdepgraph=None, innerdir=None, innerfile=None,
|
||||
innerclass=None, innernamespace=None, innerpage=None, innergroup=None,
|
||||
templateparamlist=None, sectiondef=None, briefdescription=None,
|
||||
detaileddescription=None, inheritancegraph=None, collaborationgraph=None,
|
||||
programlisting=None, location=None, listofallmembers=None):
|
||||
|
||||
supermod.compounddefType.__init__(self, kind, prot, id, compoundname, title,
|
||||
basecompoundref, derivedcompoundref, includes, includedby,
|
||||
incdepgraph, invincdepgraph, innerdir, innerfile,
|
||||
innerclass, innernamespace, innerpage, innergroup,
|
||||
templateparamlist, sectiondef, briefdescription,
|
||||
detaileddescription, inheritancegraph, collaborationgraph,
|
||||
programlisting, location, listofallmembers)
|
||||
|
||||
supermod.compounddefType.subclass = compounddefTypeSub
|
||||
# end class compounddefTypeSub
|
||||
|
||||
|
||||
class listofallmembersTypeSub(supermod.listofallmembersType):
|
||||
|
||||
node_type = "listofallmembers"
|
||||
|
||||
def __init__(self, member=None):
|
||||
supermod.listofallmembersType.__init__(self, member)
|
||||
supermod.listofallmembersType.subclass = listofallmembersTypeSub
|
||||
# end class listofallmembersTypeSub
|
||||
|
||||
|
||||
class memberRefTypeSub(supermod.memberRefType):
|
||||
|
||||
node_type = "memberref"
|
||||
|
||||
def __init__(self, virt=None, prot=None, refid=None, ambiguityscope=None, scope='', name=''):
|
||||
supermod.memberRefType.__init__(self, virt, prot, refid, ambiguityscope, scope, name)
|
||||
supermod.memberRefType.subclass = memberRefTypeSub
|
||||
# end class memberRefTypeSub
|
||||
|
||||
|
||||
class compoundRefTypeSub(supermod.compoundRefType):
|
||||
|
||||
node_type = "compoundref"
|
||||
|
||||
def __init__(self, virt=None, prot=None, refid=None, valueOf_='', mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.compoundRefType.__init__(self, mixedclass_, content_)
|
||||
supermod.compoundRefType.subclass = compoundRefTypeSub
|
||||
# end class compoundRefTypeSub
|
||||
|
||||
|
||||
class reimplementTypeSub(supermod.reimplementType):
|
||||
|
||||
node_type = "reimplement"
|
||||
|
||||
def __init__(self, refid=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.reimplementType.__init__(self, mixedclass_, content_)
|
||||
supermod.reimplementType.subclass = reimplementTypeSub
|
||||
# end class reimplementTypeSub
|
||||
|
||||
|
||||
class incTypeSub(supermod.incType):
|
||||
|
||||
node_type = "inc"
|
||||
|
||||
def __init__(self, local=None, refid=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.incType.__init__(self, mixedclass_, content_)
|
||||
supermod.incType.subclass = incTypeSub
|
||||
# end class incTypeSub
|
||||
|
||||
|
||||
class refTypeSub(supermod.refType):
|
||||
|
||||
node_type = "ref"
|
||||
|
||||
def __init__(self, node_name, prot=None, refid=None, valueOf_='', mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.refType.__init__(self, mixedclass_, content_)
|
||||
|
||||
self.node_name = node_name
|
||||
|
||||
supermod.refType.subclass = refTypeSub
|
||||
|
||||
|
||||
class refTextTypeSub(supermod.refTextType):
|
||||
|
||||
node_type = "reftex"
|
||||
|
||||
def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.refTextType.__init__(self, mixedclass_, content_)
|
||||
|
||||
supermod.refTextType.subclass = refTextTypeSub
|
||||
# end class refTextTypeSub
|
||||
|
||||
|
||||
class sectiondefTypeSub(supermod.sectiondefType):
|
||||
|
||||
node_type = "sectiondef"
|
||||
|
||||
def __init__(self, kind=None, header='', description=None, memberdef=None):
|
||||
supermod.sectiondefType.__init__(self, kind, header, description, memberdef)
|
||||
|
||||
supermod.sectiondefType.subclass = sectiondefTypeSub
|
||||
# end class sectiondefTypeSub
|
||||
|
||||
|
||||
class memberdefTypeSub(supermod.memberdefType):
|
||||
|
||||
node_type = "memberdef"
|
||||
|
||||
def __init__(self, initonly=None, kind=None, volatile=None, const=None, raise_=None, virt=None,
|
||||
readable=None, prot=None, explicit=None, new=None, final=None, writable=None,
|
||||
add=None, static=None, remove=None, sealed=None, mutable=None, gettable=None,
|
||||
inline=None, settable=None, id=None, templateparamlist=None, type_=None,
|
||||
definition='', argsstring='', name='', read='', write='', bitfield='',
|
||||
reimplements=None, reimplementedby=None, param=None, enumvalue=None,
|
||||
initializer=None, exceptions=None, briefdescription=None, detaileddescription=None,
|
||||
inbodydescription=None, location=None, references=None, referencedby=None):
|
||||
|
||||
supermod.memberdefType.__init__(self, initonly, kind, volatile, const, raise_, virt,
|
||||
readable, prot, explicit, new, final, writable, add, static,
|
||||
remove, sealed, mutable, gettable, inline, settable, id,
|
||||
templateparamlist, type_, definition, argsstring, name,
|
||||
read, write, bitfield, reimplements, reimplementedby, param,
|
||||
enumvalue, initializer, exceptions, briefdescription,
|
||||
detaileddescription, inbodydescription, location,
|
||||
references, referencedby)
|
||||
|
||||
self.parameterlist = supermod.docParamListType.factory()
|
||||
self.parameterlist.kind = "param"
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
supermod.memberdefType.buildChildren(self, child_, nodeName_)
|
||||
|
||||
if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'param':
|
||||
|
||||
# Get latest param
|
||||
param = self.param[-1]
|
||||
|
||||
# If it doesn't have a description we're done
|
||||
if not param.briefdescription:
|
||||
return
|
||||
|
||||
# Construct our own param list from the descriptions stored inline
|
||||
# with the parameters
|
||||
paramdescription = param.briefdescription
|
||||
paramname = supermod.docParamName.factory()
|
||||
|
||||
# Add parameter name
|
||||
obj_ = paramname.mixedclass_(MixedContainer.CategoryText, MixedContainer.TypeNone, '',
|
||||
param.declname)
|
||||
paramname.content_.append(obj_)
|
||||
|
||||
paramnamelist = supermod.docParamNameList.factory()
|
||||
paramnamelist.parametername.append(paramname)
|
||||
|
||||
paramlistitem = supermod.docParamListItem.factory()
|
||||
paramlistitem.parameternamelist.append(paramnamelist)
|
||||
|
||||
# Add parameter description
|
||||
paramlistitem.parameterdescription = paramdescription
|
||||
|
||||
self.parameterlist.parameteritem.append(paramlistitem)
|
||||
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'detaileddescription':
|
||||
|
||||
if not self.parameterlist.parameteritem:
|
||||
# No items in our list
|
||||
return
|
||||
|
||||
# Assume supermod.memberdefType.buildChildren has already built the
|
||||
# description object, we just want to slot our parameterlist in at
|
||||
# a reasonable point
|
||||
|
||||
if not self.detaileddescription:
|
||||
# Create one if it doesn't exist
|
||||
self.detaileddescription = supermod.descriptionType.factory()
|
||||
|
||||
detaileddescription = self.detaileddescription
|
||||
|
||||
para = supermod.docParaType.factory()
|
||||
para.parameterlist.append(self.parameterlist)
|
||||
|
||||
obj_ = detaileddescription.mixedclass_(MixedContainer.CategoryComplex,
|
||||
MixedContainer.TypeNone, 'para', para)
|
||||
|
||||
index = 0
|
||||
detaileddescription.content_.insert(index, obj_)
|
||||
|
||||
|
||||
supermod.memberdefType.subclass = memberdefTypeSub
|
||||
# end class memberdefTypeSub
|
||||
|
||||
|
||||
class descriptionTypeSub(supermod.descriptionType):
|
||||
|
||||
node_type = "description"
|
||||
|
||||
def __init__(self, title='', para=None, sect1=None, internal=None, mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.descriptionType.__init__(self, mixedclass_, content_)
|
||||
|
||||
supermod.descriptionType.subclass = descriptionTypeSub
|
||||
# end class descriptionTypeSub
|
||||
|
||||
|
||||
class enumvalueTypeSub(supermod.enumvalueType):
|
||||
|
||||
node_type = "enumvalue"
|
||||
|
||||
def __init__(self, prot=None, id=None, name='', initializer=None, briefdescription=None,
|
||||
detaileddescription=None, mixedclass_=None, content_=None):
|
||||
supermod.enumvalueType.__init__(self, mixedclass_, content_)
|
||||
|
||||
self.initializer = None
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
# Get text from <name> child and put it in self.name
|
||||
if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'name':
|
||||
value_ = []
|
||||
for text_ in child_.childNodes:
|
||||
value_.append(text_.nodeValue)
|
||||
valuestr_ = ''.join(value_)
|
||||
self.name = valuestr_
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'briefdescription':
|
||||
obj_ = supermod.descriptionType.factory()
|
||||
obj_.build(child_)
|
||||
self.set_briefdescription(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'detaileddescription':
|
||||
obj_ = supermod.descriptionType.factory()
|
||||
obj_.build(child_)
|
||||
self.set_detaileddescription(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'initializer':
|
||||
childobj_ = supermod.linkedTextType.factory()
|
||||
childobj_.build(child_)
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone,
|
||||
'initializer', childobj_)
|
||||
self.set_initializer(obj_)
|
||||
self.content_.append(obj_)
|
||||
|
||||
supermod.enumvalueType.subclass = enumvalueTypeSub
|
||||
# end class enumvalueTypeSub
|
||||
|
||||
|
||||
class templateparamlistTypeSub(supermod.templateparamlistType):
|
||||
|
||||
node_type = "templateparamlist"
|
||||
|
||||
def __init__(self, param=None):
|
||||
supermod.templateparamlistType.__init__(self, param)
|
||||
supermod.templateparamlistType.subclass = templateparamlistTypeSub
|
||||
# end class templateparamlistTypeSub
|
||||
|
||||
|
||||
class paramTypeSub(supermod.paramType):
|
||||
|
||||
node_type = "param"
|
||||
|
||||
def __init__(self, type_=None, declname='', defname='', array='', defval=None,
|
||||
briefdescription=None):
|
||||
supermod.paramType.__init__(self, type_, declname, defname, array, defval, briefdescription)
|
||||
supermod.paramType.subclass = paramTypeSub
|
||||
# end class paramTypeSub
|
||||
|
||||
|
||||
class linkedTextTypeSub(supermod.linkedTextType):
|
||||
|
||||
node_type = "linkedtext"
|
||||
|
||||
def __init__(self, ref=None, mixedclass_=None, content_=None):
|
||||
supermod.linkedTextType.__init__(self, mixedclass_, content_)
|
||||
supermod.linkedTextType.subclass = linkedTextTypeSub
|
||||
# end class linkedTextTypeSub
|
||||
|
||||
|
||||
class graphTypeSub(supermod.graphType):
|
||||
|
||||
node_type = "graph"
|
||||
|
||||
def __init__(self, node=None):
|
||||
supermod.graphType.__init__(self, node)
|
||||
supermod.graphType.subclass = graphTypeSub
|
||||
# end class graphTypeSub
|
||||
|
||||
|
||||
class nodeTypeSub(supermod.nodeType):
|
||||
|
||||
node_type = "node"
|
||||
|
||||
def __init__(self, id=None, label='', link=None, childnode=None):
|
||||
supermod.nodeType.__init__(self, id, label, link, childnode)
|
||||
supermod.nodeType.subclass = nodeTypeSub
|
||||
# end class nodeTypeSub
|
||||
|
||||
|
||||
class childnodeTypeSub(supermod.childnodeType):
|
||||
|
||||
node_type = "childnode"
|
||||
|
||||
def __init__(self, relation=None, refid=None, edgelabel=None):
|
||||
supermod.childnodeType.__init__(self, relation, refid, edgelabel)
|
||||
supermod.childnodeType.subclass = childnodeTypeSub
|
||||
# end class childnodeTypeSub
|
||||
|
||||
|
||||
class linkTypeSub(supermod.linkType):
|
||||
|
||||
node_type = "link"
|
||||
|
||||
def __init__(self, refid=None, external=None, valueOf_=''):
|
||||
supermod.linkType.__init__(self, refid, external)
|
||||
supermod.linkType.subclass = linkTypeSub
|
||||
# end class linkTypeSub
|
||||
|
||||
|
||||
class listingTypeSub(supermod.listingType):
|
||||
|
||||
node_type = "listing"
|
||||
|
||||
def __init__(self, codeline=None):
|
||||
supermod.listingType.__init__(self, codeline)
|
||||
supermod.listingType.subclass = listingTypeSub
|
||||
# end class listingTypeSub
|
||||
|
||||
|
||||
class codelineTypeSub(supermod.codelineType):
|
||||
|
||||
node_type = "codeline"
|
||||
|
||||
def __init__(self, external=None, lineno=None, refkind=None, refid=None, highlight=None):
|
||||
supermod.codelineType.__init__(self, external, lineno, refkind, refid, highlight)
|
||||
supermod.codelineType.subclass = codelineTypeSub
|
||||
# end class codelineTypeSub
|
||||
|
||||
|
||||
class highlightTypeSub(supermod.highlightType):
|
||||
|
||||
node_type = "highlight"
|
||||
|
||||
def __init__(self, class_=None, sp=None, ref=None, mixedclass_=None, content_=None):
|
||||
supermod.highlightType.__init__(self, mixedclass_, content_)
|
||||
supermod.highlightType.subclass = highlightTypeSub
|
||||
# end class highlightTypeSub
|
||||
|
||||
|
||||
class referenceTypeSub(supermod.referenceType):
|
||||
|
||||
node_type = "reference"
|
||||
|
||||
def __init__(self, endline=None, startline=None, refid=None, compoundref=None, valueOf_='',
|
||||
mixedclass_=None, content_=None):
|
||||
supermod.referenceType.__init__(self, mixedclass_, content_)
|
||||
supermod.referenceType.subclass = referenceTypeSub
|
||||
# end class referenceTypeSub
|
||||
|
||||
|
||||
class locationTypeSub(supermod.locationType):
|
||||
|
||||
node_type = "location"
|
||||
|
||||
def __init__(self, bodystart=None, line=None, bodyend=None, bodyfile=None, file=None,
|
||||
valueOf_=''):
|
||||
supermod.locationType.__init__(self, bodystart, line, bodyend, bodyfile, file)
|
||||
supermod.locationType.subclass = locationTypeSub
|
||||
# end class locationTypeSub
|
||||
|
||||
|
||||
class docSect1TypeSub(supermod.docSect1Type):
|
||||
|
||||
node_type = "docsect1"
|
||||
|
||||
def __init__(self, id=None, title='', para=None, sect2=None, internal=None, mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docSect1Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docSect1Type.subclass = docSect1TypeSub
|
||||
# end class docSect1TypeSub
|
||||
|
||||
|
||||
class docSect2TypeSub(supermod.docSect2Type):
|
||||
|
||||
node_type = "docsect2"
|
||||
|
||||
def __init__(self, id=None, title='', para=None, sect3=None, internal=None, mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docSect2Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docSect2Type.subclass = docSect2TypeSub
|
||||
# end class docSect2TypeSub
|
||||
|
||||
|
||||
class docSect3TypeSub(supermod.docSect3Type):
|
||||
|
||||
node_type = "docsect3"
|
||||
|
||||
def __init__(self, id=None, title='', para=None, sect4=None, internal=None, mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docSect3Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docSect3Type.subclass = docSect3TypeSub
|
||||
# end class docSect3TypeSub
|
||||
|
||||
|
||||
class docSect4TypeSub(supermod.docSect4Type):
|
||||
|
||||
node_type = "docsect4"
|
||||
|
||||
def __init__(self, id=None, title='', para=None, internal=None, mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docSect4Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docSect4Type.subclass = docSect4TypeSub
|
||||
# end class docSect4TypeSub
|
||||
|
||||
|
||||
class docInternalTypeSub(supermod.docInternalType):
|
||||
|
||||
node_type = "docinternal"
|
||||
|
||||
def __init__(self, para=None, sect1=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalType.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalType.subclass = docInternalTypeSub
|
||||
# end class docInternalTypeSub
|
||||
|
||||
|
||||
class docInternalS1TypeSub(supermod.docInternalS1Type):
|
||||
|
||||
node_type = "docinternals1"
|
||||
|
||||
def __init__(self, para=None, sect2=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS1Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS1Type.subclass = docInternalS1TypeSub
|
||||
# end class docInternalS1TypeSub
|
||||
|
||||
|
||||
class docInternalS2TypeSub(supermod.docInternalS2Type):
|
||||
|
||||
node_type = "docinternals2"
|
||||
|
||||
def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS2Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS2Type.subclass = docInternalS2TypeSub
|
||||
# end class docInternalS2TypeSub
|
||||
|
||||
|
||||
class docInternalS3TypeSub(supermod.docInternalS3Type):
|
||||
|
||||
node_type = "docinternals3"
|
||||
|
||||
def __init__(self, para=None, sect3=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS3Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS3Type.subclass = docInternalS3TypeSub
|
||||
# end class docInternalS3TypeSub
|
||||
|
||||
|
||||
class docInternalS4TypeSub(supermod.docInternalS4Type):
|
||||
|
||||
node_type = "docinternals4"
|
||||
|
||||
def __init__(self, para=None, mixedclass_=None, content_=None):
|
||||
supermod.docInternalS4Type.__init__(self, mixedclass_, content_)
|
||||
supermod.docInternalS4Type.subclass = docInternalS4TypeSub
|
||||
# end class docInternalS4TypeSub
|
||||
|
||||
|
||||
class docURLLinkSub(supermod.docURLLink):
|
||||
|
||||
node_type = "docurllink"
|
||||
|
||||
def __init__(self, url=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docURLLink.__init__(self, mixedclass_, content_)
|
||||
supermod.docURLLink.subclass = docURLLinkSub
|
||||
# end class docURLLinkSub
|
||||
|
||||
|
||||
class docAnchorTypeSub(supermod.docAnchorType):
|
||||
|
||||
node_type = "docanchor"
|
||||
|
||||
def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docAnchorType.__init__(self, mixedclass_, content_)
|
||||
supermod.docAnchorType.subclass = docAnchorTypeSub
|
||||
# end class docAnchorTypeSub
|
||||
|
||||
|
||||
class docFormulaTypeSub(supermod.docFormulaType):
|
||||
|
||||
node_type = "docformula"
|
||||
|
||||
def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docFormulaType.__init__(self, mixedclass_, content_)
|
||||
supermod.docFormulaType.subclass = docFormulaTypeSub
|
||||
# end class docFormulaTypeSub
|
||||
|
||||
|
||||
class docIndexEntryTypeSub(supermod.docIndexEntryType):
|
||||
|
||||
node_type = "docindexentry"
|
||||
|
||||
def __init__(self, primaryie='', secondaryie=''):
|
||||
supermod.docIndexEntryType.__init__(self, primaryie, secondaryie)
|
||||
supermod.docIndexEntryType.subclass = docIndexEntryTypeSub
|
||||
# end class docIndexEntryTypeSub
|
||||
|
||||
|
||||
class docListTypeSub(supermod.docListType):

    node_type = "doclist"

    def __init__(self, listitem=None, subtype=""):
        self.node_subtype = "itemized"
        # String values need an equality test, not an identity test
        if subtype != "":
            self.node_subtype = subtype
        supermod.docListType.__init__(self, listitem)
supermod.docListType.subclass = docListTypeSub
# end class docListTypeSub
|
||||
|
||||
|
||||
class docListItemTypeSub(supermod.docListItemType):
|
||||
|
||||
node_type = "doclistitem"
|
||||
|
||||
def __init__(self, para=None):
|
||||
supermod.docListItemType.__init__(self, para)
|
||||
supermod.docListItemType.subclass = docListItemTypeSub
|
||||
# end class docListItemTypeSub
|
||||
|
||||
|
||||
class docSimpleSectTypeSub(supermod.docSimpleSectType):
|
||||
|
||||
node_type = "docsimplesect"
|
||||
|
||||
def __init__(self, kind=None, title=None, para=None):
|
||||
supermod.docSimpleSectType.__init__(self, kind, title, para)
|
||||
supermod.docSimpleSectType.subclass = docSimpleSectTypeSub
|
||||
# end class docSimpleSectTypeSub
|
||||
|
||||
|
||||
class docVarListEntryTypeSub(supermod.docVarListEntryType):
|
||||
|
||||
node_type = "docvarlistentry"
|
||||
|
||||
def __init__(self, term=None):
|
||||
supermod.docVarListEntryType.__init__(self, term)
|
||||
supermod.docVarListEntryType.subclass = docVarListEntryTypeSub
|
||||
# end class docVarListEntryTypeSub
|
||||
|
||||
|
||||
class docRefTextTypeSub(supermod.docRefTextType):
|
||||
|
||||
node_type = "docreftext"
|
||||
|
||||
def __init__(self, refid=None, kindref=None, external=None, valueOf_='', mixedclass_=None,
|
||||
content_=None):
|
||||
supermod.docRefTextType.__init__(self, mixedclass_, content_)
|
||||
|
||||
self.para = []
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
supermod.docRefTextType.buildChildren(self, child_, nodeName_)
|
||||
|
||||
if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'para':
|
||||
obj_ = supermod.docParaType.factory()
|
||||
obj_.build(child_)
|
||||
self.para.append(obj_)
|
||||
|
||||
supermod.docRefTextType.subclass = docRefTextTypeSub
|
||||
# end class docRefTextTypeSub
|
||||
|
||||
|
||||
class docTableTypeSub(supermod.docTableType):
|
||||
|
||||
node_type = "doctable"
|
||||
|
||||
def __init__(self, rows=None, cols=None, row=None, caption=None):
|
||||
supermod.docTableType.__init__(self, rows, cols, row, caption)
|
||||
supermod.docTableType.subclass = docTableTypeSub
|
||||
# end class docTableTypeSub
|
||||
|
||||
|
||||
class docRowTypeSub(supermod.docRowType):
|
||||
|
||||
node_type = "docrow"
|
||||
|
||||
def __init__(self, entry=None):
|
||||
supermod.docRowType.__init__(self, entry)
|
||||
supermod.docRowType.subclass = docRowTypeSub
|
||||
# end class docRowTypeSub
|
||||
|
||||
|
||||
class docEntryTypeSub(supermod.docEntryType):
|
||||
|
||||
node_type = "docentry"
|
||||
|
||||
def __init__(self, thead=None, para=None):
|
||||
supermod.docEntryType.__init__(self, thead, para)
|
||||
supermod.docEntryType.subclass = docEntryTypeSub
|
||||
# end class docEntryTypeSub
|
||||
|
||||
|
||||
class docHeadingTypeSub(supermod.docHeadingType):
|
||||
|
||||
node_type = "docheading"
|
||||
|
||||
def __init__(self, level=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docHeadingType.__init__(self, mixedclass_, content_)
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
supermod.docHeadingType.buildChildren(self, child_, nodeName_)
|
||||
|
||||
# Account for styled content in the heading. This might need to be expanded to include other
|
||||
# nodes as it seems from the xsd that headings can have a lot of different children but we
|
||||
# really don't expect most of them to come up.
|
||||
if child_.nodeType == Node.ELEMENT_NODE and (
|
||||
nodeName_ == 'bold' or
|
||||
nodeName_ == 'emphasis' or
|
||||
nodeName_ == 'computeroutput' or
|
||||
nodeName_ == 'subscript' or
|
||||
nodeName_ == 'superscript' or
|
||||
nodeName_ == 'center' or
|
||||
nodeName_ == 'small'):
|
||||
obj_ = supermod.docMarkupType.factory()
|
||||
obj_.build(child_)
|
||||
obj_.type_ = nodeName_
|
||||
self.content_.append(obj_)
|
||||
|
||||
supermod.docHeadingType.subclass = docHeadingTypeSub
|
||||
# end class docHeadingTypeSub
|
||||
|
||||
|
||||
class docImageTypeSub(supermod.docImageType):
|
||||
|
||||
node_type = "docimage"
|
||||
|
||||
def __init__(self, width=None, type_=None, name=None, height=None, valueOf_='',
|
||||
mixedclass_=None, content_=None):
|
||||
supermod.docImageType.__init__(self, mixedclass_, content_)
|
||||
supermod.docImageType.subclass = docImageTypeSub
|
||||
# end class docImageTypeSub
|
||||
|
||||
|
||||
class docDotFileTypeSub(supermod.docDotFileType):
|
||||
|
||||
node_type = "docdocfile"
|
||||
|
||||
def __init__(self, name=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docDotFileType.__init__(self, mixedclass_, content_)
|
||||
supermod.docDotFileType.subclass = docDotFileTypeSub
|
||||
# end class docDotFileTypeSub
|
||||
|
||||
|
||||
class docTocItemTypeSub(supermod.docTocItemType):
|
||||
|
||||
node_type = "doctocitem"
|
||||
|
||||
def __init__(self, id=None, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docTocItemType.__init__(self, mixedclass_, content_)
|
||||
supermod.docTocItemType.subclass = docTocItemTypeSub
|
||||
# end class docTocItemTypeSub
|
||||
|
||||
|
||||
class docTocListTypeSub(supermod.docTocListType):
|
||||
|
||||
node_type = "doctoclist"
|
||||
|
||||
def __init__(self, tocitem=None):
|
||||
supermod.docTocListType.__init__(self, tocitem)
|
||||
supermod.docTocListType.subclass = docTocListTypeSub
|
||||
# end class docTocListTypeSub
|
||||
|
||||
|
||||
class docLanguageTypeSub(supermod.docLanguageType):
|
||||
|
||||
node_type = "doclanguage"
|
||||
|
||||
def __init__(self, langid=None, para=None):
|
||||
supermod.docLanguageType.__init__(self, langid, para)
|
||||
supermod.docLanguageType.subclass = docLanguageTypeSub
|
||||
# end class docLanguageTypeSub
|
||||
|
||||
|
||||
class docParamListTypeSub(supermod.docParamListType):
|
||||
|
||||
node_type = "docparamlist"
|
||||
|
||||
def __init__(self, kind=None, parameteritem=None):
|
||||
supermod.docParamListType.__init__(self, kind, parameteritem)
|
||||
supermod.docParamListType.subclass = docParamListTypeSub
|
||||
# end class docParamListTypeSub
|
||||
|
||||
|
||||
class docParamListItemSub(supermod.docParamListItem):
|
||||
|
||||
node_type = "docparamlistitem"
|
||||
|
||||
def __init__(self, parameternamelist=None, parameterdescription=None):
|
||||
supermod.docParamListItem.__init__(self, parameternamelist, parameterdescription)
|
||||
supermod.docParamListItem.subclass = docParamListItemSub
|
||||
# end class docParamListItemSub
|
||||
|
||||
|
||||
class docParamNameListSub(supermod.docParamNameList):
|
||||
|
||||
node_type = "docparamnamelist"
|
||||
|
||||
def __init__(self, parametername=None):
|
||||
supermod.docParamNameList.__init__(self, parametername)
|
||||
supermod.docParamNameList.subclass = docParamNameListSub
|
||||
# end class docParamNameListSub
|
||||
|
||||
|
||||
class docParamNameSub(supermod.docParamName):
|
||||
|
||||
node_type = "docparamname"
|
||||
|
||||
def __init__(self, direction=None, ref=None, mixedclass_=None, content_=None):
|
||||
supermod.docParamName.__init__(self, mixedclass_, content_)
|
||||
supermod.docParamName.subclass = docParamNameSub
|
||||
# end class docParamNameSub
|
||||
|
||||
|
||||
class docXRefSectTypeSub(supermod.docXRefSectType):
|
||||
|
||||
node_type = "docxrefsect"
|
||||
|
||||
def __init__(self, id=None, xreftitle=None, xrefdescription=None):
|
||||
supermod.docXRefSectType.__init__(self, id, xreftitle, xrefdescription)
|
||||
supermod.docXRefSectType.subclass = docXRefSectTypeSub
|
||||
# end class docXRefSectTypeSub
|
||||
|
||||
|
||||
class docCopyTypeSub(supermod.docCopyType):
|
||||
|
||||
node_type = "doccopy"
|
||||
|
||||
def __init__(self, link=None, para=None, sect1=None, internal=None):
|
||||
supermod.docCopyType.__init__(self, link, para, sect1, internal)
|
||||
supermod.docCopyType.subclass = docCopyTypeSub
|
||||
# end class docCopyTypeSub
|
||||
|
||||
|
||||
class docCharTypeSub(supermod.docCharType):
|
||||
|
||||
node_type = "docchar"
|
||||
|
||||
def __init__(self, char=None, valueOf_=''):
|
||||
supermod.docCharType.__init__(self, char)
|
||||
supermod.docCharType.subclass = docCharTypeSub
|
||||
# end class docCharTypeSub
|
||||
|
||||
|
||||
class verbatimTypeSub(object):
    """
    New node type. Structure is largely pillaged from other nodes in order to
    match the set.
    """

    node_type = "verbatim"

    def __init__(self, valueOf_='', mixedclass_=None, content_=None):
        if mixedclass_ is None:
            self.mixedclass_ = MixedContainer
        else:
            self.mixedclass_ = mixedclass_
        if content_ is None:
            self.content_ = []
        else:
            self.content_ = content_
        self.text = ""

    def factory(*args, **kwargs):
        return verbatimTypeSub(*args, **kwargs)

    factory = staticmethod(factory)

    def buildAttributes(self, attrs):
        pass

    def build(self, node_):
        attrs = node_.attributes
        self.buildAttributes(attrs)
        self.valueOf_ = ''
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(':')[-1]
            self.buildChildren(child_, nodeName_)

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType == Node.TEXT_NODE:
            self.text += child_.nodeValue

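# A minimal sketch of the node protocol that verbatimTypeSub implements
# (factory / build / buildChildren collecting text nodes), exercised with
# xml.dom.minidom. The class below is a simplified stand-in, not the
# generated supermod machinery.
from xml.dom import Node, minidom

class _VerbatimSketch(object):

    def __init__(self):
        self.text = ""

    @staticmethod
    def factory():
        return _VerbatimSketch()

    def build(self, node_):
        for child_ in node_.childNodes:
            self.buildChildren(child_, child_.nodeName.split(':')[-1])

    def buildChildren(self, child_, nodeName_):
        # Accumulate raw text content, as verbatimTypeSub does above
        if child_.nodeType == Node.TEXT_NODE:
            self.text += child_.nodeValue

doc = minidom.parseString("<verbatim>embed:rst\nSome *rst* content.</verbatim>")
node = _VerbatimSketch.factory()
node.build(doc.documentElement)
print(node.text)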
class docParaTypeSub(supermod.docParaType):
|
||||
|
||||
node_type = "docpara"
|
||||
|
||||
def __init__(self, char=None, valueOf_=''):
|
||||
supermod.docParaType.__init__(self, char)
|
||||
|
||||
self.parameterlist = []
|
||||
self.simplesects = []
|
||||
self.content = []
|
||||
self.programlisting = []
|
||||
self.images = []
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
supermod.docParaType.buildChildren(self, child_, nodeName_)
|
||||
|
||||
if child_.nodeType == Node.TEXT_NODE:
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryText,
|
||||
MixedContainer.TypeNone, '', child_.nodeValue)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "ref":
|
||||
obj_ = supermod.docRefTextType.factory()
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'parameterlist':
|
||||
obj_ = supermod.docParamListType.factory()
|
||||
obj_.build(child_)
|
||||
self.parameterlist.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'simplesect':
|
||||
obj_ = supermod.docSimpleSectType.factory()
|
||||
obj_.build(child_)
|
||||
self.simplesects.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'programlisting':
|
||||
obj_ = supermod.listingType.factory()
|
||||
obj_.build(child_)
|
||||
self.programlisting.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'image':
|
||||
obj_ = supermod.docImageType.factory()
|
||||
obj_.build(child_)
|
||||
self.images.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and (
|
||||
nodeName_ == 'bold' or
|
||||
nodeName_ == 'emphasis' or
|
||||
nodeName_ == 'computeroutput' or
|
||||
nodeName_ == 'subscript' or
|
||||
nodeName_ == 'superscript' or
|
||||
nodeName_ == 'center' or
|
||||
nodeName_ == 'small'):
|
||||
obj_ = supermod.docMarkupType.factory()
|
||||
obj_.build(child_)
|
||||
obj_.type_ = nodeName_
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'verbatim':
|
||||
childobj_ = verbatimTypeSub.factory()
|
||||
childobj_.build(child_)
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone,
|
||||
'verbatim', childobj_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'formula':
|
||||
childobj_ = docFormulaTypeSub.factory()
|
||||
childobj_.build(child_)
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone,
|
||||
'formula', childobj_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "itemizedlist":
|
||||
obj_ = supermod.docListType.factory(subtype="itemized")
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == "orderedlist":
|
||||
obj_ = supermod.docListType.factory(subtype="ordered")
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'heading':
|
||||
obj_ = supermod.docHeadingType.factory()
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'ulink':
|
||||
obj_ = supermod.docURLLink.factory()
|
||||
obj_.build(child_)
|
||||
self.content.append(obj_)
|
||||
|
||||
supermod.docParaType.subclass = docParaTypeSub
|
||||
# end class docParaTypeSub
|
||||
|
||||
|
||||
class docMarkupTypeSub(supermod.docMarkupType):
|
||||
|
||||
node_type = "docmarkup"
|
||||
|
||||
def __init__(self, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docMarkupType.__init__(self, valueOf_, mixedclass_, content_)
|
||||
self.type_ = None
|
||||
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.TEXT_NODE:
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryText, MixedContainer.TypeNone, '',
|
||||
child_.nodeValue)
|
||||
self.content_.append(obj_)
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'ref':
|
||||
childobj_ = supermod.docRefTextType.factory()
|
||||
childobj_.build(child_)
|
||||
obj_ = self.mixedclass_(MixedContainer.CategoryComplex, MixedContainer.TypeNone, 'ref',
|
||||
childobj_)
|
||||
self.content_.append(obj_)
|
||||
if child_.nodeType == Node.TEXT_NODE:
|
||||
self.valueOf_ += child_.nodeValue
|
||||
elif child_.nodeType == Node.CDATA_SECTION_NODE:
|
||||
self.valueOf_ += '![CDATA[' + child_.nodeValue + ']]'
|
||||
|
||||
supermod.docMarkupType.subclass = docMarkupTypeSub
|
||||
# end class docMarkupTypeSub
|
||||
|
||||
|
||||
class docTitleTypeSub(supermod.docTitleType):
|
||||
|
||||
node_type = "doctitle"
|
||||
|
||||
def __init__(self, valueOf_='', mixedclass_=None, content_=None):
|
||||
supermod.docTitleType.__init__(self, valueOf_, mixedclass_, content_)
|
||||
self.type_ = None
|
||||
|
||||
supermod.docTitleType.subclass = docTitleTypeSub
|
||||
# end class docTitleTypeSub
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class FileIOError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def parse(inFilename):

    try:
        doc = minidom.parse(inFilename)
    except IOError as e:
        raise FileIOError(e)
    except ExpatError as e:
        raise ParseError(e)

    rootNode = doc.documentElement
    rootObj = supermod.DoxygenType.factory()
    rootObj.build(rootNode)
    return rootObj

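# A minimal usage sketch for the parse() wrapper above. The path below is
# hypothetical; it would normally point at a file produced by Doxygen's
# GENERATE_XML output.
def load_doxygen_xml(path="doxygen/xml/example_8h.xml"):
    try:
        return parse(path)
    except FileIOError as err:
        # File missing or unreadable
        print("cannot read %s: %s" % (path, err))
    except ParseError as err:
        # File exists but is not well-formed XML
        print("cannot parse %s: %s" % (path, err))
    return None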
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
@ -1,63 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Generated Mon Feb 9 19:08:05 2009 by generateDS.py.
|
||||
"""
|
||||
|
||||
from xml.dom import minidom
|
||||
from xml.parsers.expat import ExpatError
|
||||
|
||||
|
||||
from . import indexsuper as supermod
|
||||
|
||||
class DoxygenTypeSub(supermod.DoxygenType):
|
||||
|
||||
node_type = "doxygen"
|
||||
|
||||
def __init__(self, version=None, compound=None):
|
||||
supermod.DoxygenType.__init__(self, version, compound)
|
||||
supermod.DoxygenType.subclass = DoxygenTypeSub
|
||||
# end class DoxygenTypeSub
|
||||
|
||||
|
||||
class CompoundTypeSub(supermod.CompoundType):
|
||||
|
||||
node_type = "compound"
|
||||
|
||||
def __init__(self, kind=None, refid=None, name='', member=None):
|
||||
supermod.CompoundType.__init__(self, kind, refid, name, member)
|
||||
supermod.CompoundType.subclass = CompoundTypeSub
|
||||
# end class CompoundTypeSub
|
||||
|
||||
|
||||
class MemberTypeSub(supermod.MemberType):
|
||||
|
||||
node_type = "member"
|
||||
|
||||
def __init__(self, kind=None, refid=None, name=''):
|
||||
supermod.MemberType.__init__(self, kind, refid, name)
|
||||
supermod.MemberType.subclass = MemberTypeSub
|
||||
# end class MemberTypeSub
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
pass
|
||||
|
||||
class FileIOError(Exception):
|
||||
pass
|
||||
|
||||
def parse(inFilename):
|
||||
|
||||
try:
|
||||
doc = minidom.parse(inFilename)
|
||||
except IOError as e:
|
||||
raise FileIOError(e)
|
||||
except ExpatError as e:
|
||||
raise ParseError(e)
|
||||
|
||||
rootNode = doc.documentElement
|
||||
rootObj = supermod.DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
|
||||
return rootObj
|
||||
|
Binary file not shown.
@ -1,362 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
#
|
||||
# Generated Thu Jun 11 18:43:54 2009 by generateDS.py.
|
||||
#
|
||||
|
||||
import sys
|
||||
import getopt
|
||||
from xml.dom import minidom
|
||||
from xml.dom import Node
|
||||
|
||||
#
|
||||
# User methods
|
||||
#
|
||||
# Calls to the methods in these classes are generated by generateDS.py.
|
||||
# You can replace these methods by re-implementing the following class
|
||||
# in a module named generatedssuper.py.
|
||||
|
||||
try:
|
||||
from generatedssuper import GeneratedsSuper
|
||||
except ImportError as exp:
|
||||
|
||||
class GeneratedsSuper:
|
||||
def format_string(self, input_data, input_name=''):
|
||||
return input_data
|
||||
def format_integer(self, input_data, input_name=''):
|
||||
return '%d' % input_data
|
||||
def format_float(self, input_data, input_name=''):
|
||||
return '%f' % input_data
|
||||
def format_double(self, input_data, input_name=''):
|
||||
return '%e' % input_data
|
||||
def format_boolean(self, input_data, input_name=''):
|
||||
return '%s' % input_data
|
||||
|
||||
|
||||
#
|
||||
# If you have installed IPython you can uncomment and use the following.
|
||||
# IPython is available from http://ipython.scipy.org/.
|
||||
#
|
||||
|
||||
## from IPython.Shell import IPShellEmbed
|
||||
## args = ''
|
||||
## ipshell = IPShellEmbed(args,
|
||||
## banner = 'Dropping into IPython',
|
||||
## exit_msg = 'Leaving Interpreter, back to program.')
|
||||
|
||||
# Then use the following line where and when you want to drop into the
|
||||
# IPython shell:
|
||||
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
|
||||
|
||||
#
|
||||
# Globals
|
||||
#
|
||||
|
||||
ExternalEncoding = 'ascii'
|
||||
|
||||
#
|
||||
# Support/utility functions.
|
||||
#
|
||||
|
||||
def showIndent(outfile, level):
|
||||
for idx in range(level):
|
||||
outfile.write(' ')
|
||||
|
||||
def quote_xml(inStr):
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1

def quote_attrib(inStr):
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1

def quote_python(inStr):
    s1 = inStr
    if s1.find("'") == -1:
        if s1.find('\n') == -1:
            return "'%s'" % s1
        else:
            return "'''%s'''" % s1
    else:
        if s1.find('"') != -1:
            s1 = s1.replace('"', '\\"')
        if s1.find('\n') == -1:
            return '"%s"' % s1
        else:
            return '"""%s"""' % s1
|
||||
|
||||
|
||||
class MixedContainer:
|
||||
# Constants for category:
|
||||
CategoryNone = 0
|
||||
CategoryText = 1
|
||||
CategorySimple = 2
|
||||
CategoryComplex = 3
|
||||
# Constants for content_type:
|
||||
TypeNone = 0
|
||||
TypeText = 1
|
||||
TypeString = 2
|
||||
TypeInteger = 3
|
||||
TypeFloat = 4
|
||||
TypeDecimal = 5
|
||||
TypeDouble = 6
|
||||
TypeBoolean = 7
|
||||
def __init__(self, category, content_type, name, value):
|
||||
self.category = category
|
||||
self.content_type = content_type
|
||||
self.name = name
|
||||
self.value = value
|
||||
def getCategory(self):
|
||||
return self.category
|
||||
def getContenttype(self, content_type):
|
||||
return self.content_type
|
||||
def getValue(self):
|
||||
return self.value
|
||||
def getName(self):
|
||||
return self.name
|
||||
|
||||
|
||||
class _MemberSpec(object):
|
||||
def __init__(self, name='', data_type='', container=0):
|
||||
self.name = name
|
||||
self.data_type = data_type
|
||||
self.container = container
|
||||
def set_name(self, name): self.name = name
|
||||
def get_name(self): return self.name
|
||||
def set_data_type(self, data_type): self.data_type = data_type
|
||||
def get_data_type(self): return self.data_type
|
||||
def set_container(self, container): self.container = container
|
||||
def get_container(self): return self.container
|
||||
|
||||
|
||||
#
|
||||
# Data representation classes.
|
||||
#
|
||||
|
||||
class DoxygenType(GeneratedsSuper):
|
||||
subclass = None
|
||||
superclass = None
|
||||
def __init__(self, version=None, compound=None):
|
||||
self.version = version
|
||||
if compound is None:
|
||||
self.compound = []
|
||||
else:
|
||||
self.compound = compound
|
||||
def factory(*args_, **kwargs_):
|
||||
if DoxygenType.subclass:
|
||||
return DoxygenType.subclass(*args_, **kwargs_)
|
||||
else:
|
||||
return DoxygenType(*args_, **kwargs_)
|
||||
factory = staticmethod(factory)
|
||||
def get_compound(self): return self.compound
|
||||
def set_compound(self, compound): self.compound = compound
|
||||
def add_compound(self, value): self.compound.append(value)
|
||||
def insert_compound(self, index, value): self.compound[index] = value
|
||||
def get_version(self): return self.version
|
||||
def set_version(self, version): self.version = version
|
||||
def hasContent_(self):
|
||||
if (
|
||||
self.compound is not None
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
def build(self, node_):
|
||||
attrs = node_.attributes
|
||||
self.buildAttributes(attrs)
|
||||
for child_ in node_.childNodes:
|
||||
nodeName_ = child_.nodeName.split(':')[-1]
|
||||
self.buildChildren(child_, nodeName_)
|
||||
def buildAttributes(self, attrs):
|
||||
if attrs.get('version'):
|
||||
self.version = attrs.get('version').value
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'compound':
|
||||
obj_ = CompoundType.factory()
|
||||
obj_.build(child_)
|
||||
self.compound.append(obj_)
|
||||
# end class DoxygenType
|
||||
|
||||
|
||||
class CompoundType(GeneratedsSuper):
|
||||
subclass = None
|
||||
superclass = None
|
||||
def __init__(self, kind=None, refid=None, name=None, member=None):
|
||||
self.kind = kind
|
||||
self.refid = refid
|
||||
self.name = name
|
||||
if member is None:
|
||||
self.member = []
|
||||
else:
|
||||
self.member = member
|
||||
def factory(*args_, **kwargs_):
|
||||
if CompoundType.subclass:
|
||||
return CompoundType.subclass(*args_, **kwargs_)
|
||||
else:
|
||||
return CompoundType(*args_, **kwargs_)
|
||||
factory = staticmethod(factory)
|
||||
def get_name(self): return self.name
|
||||
def set_name(self, name): self.name = name
|
||||
def get_member(self): return self.member
|
||||
def set_member(self, member): self.member = member
|
||||
def add_member(self, value): self.member.append(value)
|
||||
def insert_member(self, index, value): self.member[index] = value
|
||||
def get_kind(self): return self.kind
|
||||
def set_kind(self, kind): self.kind = kind
|
||||
def get_refid(self): return self.refid
|
||||
def set_refid(self, refid): self.refid = refid
|
||||
def build(self, node_):
|
||||
attrs = node_.attributes
|
||||
self.buildAttributes(attrs)
|
||||
for child_ in node_.childNodes:
|
||||
nodeName_ = child_.nodeName.split(':')[-1]
|
||||
self.buildChildren(child_, nodeName_)
|
||||
def buildAttributes(self, attrs):
|
||||
if attrs.get('kind'):
|
||||
self.kind = attrs.get('kind').value
|
||||
if attrs.get('refid'):
|
||||
self.refid = attrs.get('refid').value
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'name':
|
||||
name_ = ''
|
||||
for text__content_ in child_.childNodes:
|
||||
name_ += text__content_.nodeValue
|
||||
self.name = name_
|
||||
elif child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'member':
|
||||
obj_ = MemberType.factory()
|
||||
obj_.build(child_)
|
||||
self.member.append(obj_)
|
||||
# end class CompoundType
|
||||
|
||||
|
||||
class MemberType(GeneratedsSuper):
|
||||
subclass = None
|
||||
superclass = None
|
||||
def __init__(self, kind=None, refid=None, name=None):
|
||||
self.kind = kind
|
||||
self.refid = refid
|
||||
self.name = name
|
||||
def factory(*args_, **kwargs_):
|
||||
if MemberType.subclass:
|
||||
return MemberType.subclass(*args_, **kwargs_)
|
||||
else:
|
||||
return MemberType(*args_, **kwargs_)
|
||||
factory = staticmethod(factory)
|
||||
def get_name(self): return self.name
|
||||
def set_name(self, name): self.name = name
|
||||
def get_kind(self): return self.kind
|
||||
def set_kind(self, kind): self.kind = kind
|
||||
def get_refid(self): return self.refid
|
||||
def set_refid(self, refid): self.refid = refid
|
||||
def hasContent_(self):
|
||||
if (
|
||||
self.name is not None
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
def build(self, node_):
|
||||
attrs = node_.attributes
|
||||
self.buildAttributes(attrs)
|
||||
for child_ in node_.childNodes:
|
||||
nodeName_ = child_.nodeName.split(':')[-1]
|
||||
self.buildChildren(child_, nodeName_)
|
||||
def buildAttributes(self, attrs):
|
||||
if attrs.get('kind'):
|
||||
self.kind = attrs.get('kind').value
|
||||
if attrs.get('refid'):
|
||||
self.refid = attrs.get('refid').value
|
||||
def buildChildren(self, child_, nodeName_):
|
||||
if child_.nodeType == Node.ELEMENT_NODE and \
|
||||
nodeName_ == 'name':
|
||||
name_ = ''
|
||||
for text__content_ in child_.childNodes:
|
||||
name_ += text__content_.nodeValue
|
||||
self.name = name_
|
||||
# end class MemberType
|
||||
|
||||
|
||||
USAGE_TEXT = """
|
||||
Usage: python <Parser>.py [ -s ] <in_xml_file>
|
||||
Options:
|
||||
-s Use the SAX parser, not the minidom parser.
|
||||
"""
|
||||
|
||||
def usage():
|
||||
print(USAGE_TEXT)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def parse(inFileName):
|
||||
doc = minidom.parse(inFileName)
|
||||
rootNode = doc.documentElement
|
||||
rootObj = DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
# Enable Python to collect the space used by the DOM.
|
||||
doc = None
|
||||
sys.stdout.write('<?xml version="1.0" ?>\n')
|
||||
rootObj.export(sys.stdout, 0, name_="doxygenindex",
|
||||
namespacedef_='')
|
||||
return rootObj
|
||||
|
||||
|
||||
def parseString(inString):
|
||||
doc = minidom.parseString(inString)
|
||||
rootNode = doc.documentElement
|
||||
rootObj = DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
# Enable Python to collect the space used by the DOM.
|
||||
doc = None
|
||||
sys.stdout.write('<?xml version="1.0" ?>\n')
|
||||
rootObj.export(sys.stdout, 0, name_="doxygenindex",
|
||||
namespacedef_='')
|
||||
return rootObj
|
||||
|
||||
|
||||
def parseLiteral(inFileName):
|
||||
doc = minidom.parse(inFileName)
|
||||
rootNode = doc.documentElement
|
||||
rootObj = DoxygenType.factory()
|
||||
rootObj.build(rootNode)
|
||||
# Enable Python to collect the space used by the DOM.
|
||||
doc = None
|
||||
sys.stdout.write('from index import *\n\n')
|
||||
sys.stdout.write('rootObj = doxygenindex(\n')
|
||||
rootObj.exportLiteral(sys.stdout, 0, name_="doxygenindex")
|
||||
sys.stdout.write(')\n')
|
||||
return rootObj
|
||||
|
||||
|
||||
def main():
|
||||
args = sys.argv[1:]
|
||||
if len(args) == 1:
|
||||
parse(args[0])
|
||||
else:
|
||||
usage()
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
#import pdb
|
||||
#pdb.run('main()')
|
||||
|
Binary file not shown.
@ -1,87 +0,0 @@
|
||||
|
||||
AUTOCFG_TEMPLATE = r"""
|
||||
PROJECT_NAME = "{project_name}"
|
||||
OUTPUT_DIRECTORY = {output_dir}
|
||||
GENERATE_LATEX = NO
|
||||
GENERATE_MAN = NO
|
||||
GENERATE_RTF = NO
|
||||
CASE_SENSE_NAMES = NO
|
||||
INPUT = {input}
|
||||
ENABLE_PREPROCESSING = YES
|
||||
QUIET = YES
|
||||
JAVADOC_AUTOBRIEF = YES
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
GENERATE_HTML = NO
|
||||
GENERATE_XML = YES
|
||||
ALIASES = "rst=\verbatim embed:rst"
|
||||
ALIASES += "endrst=\endverbatim"
|
||||
""".strip()
|
||||
|
||||
|
||||
class ProjectData(object):
|
||||
"Simple handler for the files and project_info for each project"
|
||||
|
||||
def __init__(self, auto_project_info, files):
|
||||
|
||||
self.auto_project_info = auto_project_info
|
||||
self.files = files
|
||||
|
||||
|
||||
class AutoDoxygenProcessHandle(object):
|
||||
|
||||
def __init__(self, path_handler, run_process, write_file, project_info_factory):
|
||||
|
||||
self.path_handler = path_handler
|
||||
self.run_process = run_process
|
||||
self.write_file = write_file
|
||||
self.project_info_factory = project_info_factory
|
||||
|
||||
def generate_xml(self, app):
|
||||
|
||||
project_files = {}
|
||||
|
||||
# First collect together all the files which need to be doxygen processed for each project
|
||||
for project_name, file_structure in app.config.breathe_projects_source.items():
|
||||
|
||||
folder = file_structure[0]
|
||||
contents = file_structure[1]
|
||||
|
||||
auto_project_info = self.project_info_factory.create_auto_project_info(
|
||||
project_name, folder)
|
||||
|
||||
project_files[project_name] = ProjectData(auto_project_info, contents)
|
||||
|
||||
# Iterate over the projects and generate doxygen xml output for the files for each one into
|
||||
# a directory in the Sphinx build area
|
||||
for project_name, data in project_files.items():
|
||||
|
||||
project_path = self.process(data.auto_project_info, data.files)
|
||||
|
||||
project_info = data.auto_project_info.create_project_info(project_path)
|
||||
|
||||
self.project_info_factory.store_project_info_for_auto(project_name, project_info)
|
||||
|
||||
def process(self, auto_project_info, files):
|
||||
|
||||
name = auto_project_info.name()
|
||||
cfgfile = "%s.cfg" % name
|
||||
|
||||
full_paths = map(lambda x: auto_project_info.abs_path_to_source_file(x), files)
|
||||
|
||||
cfg = AUTOCFG_TEMPLATE.format(
|
||||
project_name=name,
|
||||
output_dir=name,
|
||||
input=" ".join(full_paths)
|
||||
)
|
||||
|
||||
build_dir = self.path_handler.join(
|
||||
auto_project_info.build_dir(),
|
||||
"breathe",
|
||||
"doxygen"
|
||||
)
|
||||
|
||||
self.write_file(build_dir, cfgfile, cfg)
|
||||
|
||||
self.run_process(['doxygen', cfgfile], cwd=build_dir)
|
||||
|
||||
return self.path_handler.join(build_dir, name, "xml")
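# A minimal wiring sketch for AutoDoxygenProcessHandle. Using os.path,
# subprocess.check_call and a small write_file helper here is an assumption
# for illustration; the real collaborators are injected by the extension's
# setup code, and project_info_factory / app are placeholders.
import os
import subprocess

def _write_file(directory, filename, content):
    if not os.path.exists(directory):
        os.makedirs(directory)
    with open(os.path.join(directory, filename), "w") as f:
        f.write(content)

# handle = AutoDoxygenProcessHandle(
#     os.path,                   # path_handler: needs .join()
#     subprocess.check_call,     # run_process(cmd, cwd=...)
#     _write_file,               # write_file(dir, name, content)
#     project_info_factory,      # placeholder, built elsewhere
# )
# handle.generate_xml(app)       # app: the Sphinx application object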
|
Binary file not shown.
@ -1,306 +0,0 @@
|
||||
|
||||
from .exception import BreatheError
|
||||
|
||||
import os
|
||||
|
||||
|
||||
class ProjectError(BreatheError):
|
||||
pass
|
||||
|
||||
|
||||
class NoDefaultProjectError(ProjectError):
|
||||
pass
|
||||
|
||||
|
||||
class AutoProjectInfo(object):
|
||||
"""Created as a temporary step in the automatic xml generation process"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
source_path,
|
||||
build_dir,
|
||||
reference,
|
||||
source_dir,
|
||||
config_dir,
|
||||
domain_by_extension,
|
||||
domain_by_file_pattern,
|
||||
match
|
||||
):
|
||||
|
||||
self._name = name
|
||||
self._source_path = source_path
|
||||
self._build_dir = build_dir
|
||||
self._reference = reference
|
||||
self._source_dir = source_dir
|
||||
self._config_dir = config_dir
|
||||
self._domain_by_extension = domain_by_extension
|
||||
self._domain_by_file_pattern = domain_by_file_pattern
|
||||
self._match = match
|
||||
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
def build_dir(self):
|
||||
return self._build_dir
|
||||
|
||||
def abs_path_to_source_file(self, file_):
|
||||
"""
|
||||
Returns full path to the provide file assuming that the provided path is relative to the
|
||||
projects conf.py directory as specified in the breathe_projects_source config variable.
|
||||
"""
|
||||
|
||||
# os.path.join does the appropriate handling if _source_path is an absolute path
|
||||
return os.path.join(self._config_dir, self._source_path, file_)
|
||||
|
||||
def create_project_info(self, project_path):
|
||||
"""Creates a proper ProjectInfo object based on the information in this AutoProjectInfo"""
|
||||
|
||||
return ProjectInfo(
|
||||
self._name,
|
||||
project_path,
|
||||
self._source_path,
|
||||
self._reference,
|
||||
self._source_dir,
|
||||
self._config_dir,
|
||||
self._domain_by_extension,
|
||||
self._domain_by_file_pattern,
|
||||
self._match
|
||||
)
|
||||
|
||||
|
||||
class ProjectInfo(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
path,
|
||||
source_path,
|
||||
reference,
|
||||
source_dir,
|
||||
config_dir,
|
||||
domain_by_extension,
|
||||
domain_by_file_pattern,
|
||||
match
|
||||
):
|
||||
|
||||
self._name = name
|
||||
self._project_path = path
|
||||
self._source_path = source_path
|
||||
self._reference = reference
|
||||
self._source_dir = source_dir
|
||||
self._config_dir = config_dir
|
||||
self._domain_by_extension = domain_by_extension
|
||||
self._domain_by_file_pattern = domain_by_file_pattern
|
||||
self._match = match
|
||||
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
def project_path(self):
|
||||
return self._project_path
|
||||
|
||||
def source_path(self):
|
||||
return self._source_path
|
||||
|
||||
def relative_path_to_xml_file(self, file_):
|
||||
"""
|
||||
Returns relative path from Sphinx documentation top-level source directory to the specified
|
||||
file assuming that the specified file is a path relative to the doxygen xml output
|
||||
directory.
|
||||
"""
|
||||
|
||||
# os.path.join does the appropriate handling if _project_path is an absolute path
|
||||
full_xml_project_path = os.path.join(self._config_dir, self._project_path, file_)
|
||||
|
||||
return os.path.relpath(
|
||||
full_xml_project_path,
|
||||
self._source_dir
|
||||
)
|
||||
|
||||
def sphinx_abs_path_to_file(self, file_):
|
||||
"""
|
||||
Prepends os.path.sep to the value returned by relative_path_to_file.
|
||||
|
||||
This is to match Sphinx's concept of an absolute path which starts from the top-level source
|
||||
directory of the project.
|
||||
"""
|
||||
return os.path.sep + self.relative_path_to_xml_file(file_)
|
||||
|
||||
def reference(self):
|
||||
return self._reference
|
||||
|
||||
def domain_for_file(self, file_):
|
||||
|
||||
domain = ""
|
||||
extension = file_.split(".")[-1]
|
||||
|
||||
try:
|
||||
domain = self._domain_by_extension[extension]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
for pattern, pattern_domain in self._domain_by_file_pattern.items():
|
||||
if self._match(file_, pattern):
|
||||
domain = pattern_domain
|
||||
|
||||
return domain
|
||||
|
||||
|
||||
class ProjectInfoFactory(object):
|
||||
|
||||
def __init__(self, source_dir, build_dir, config_dir, match):
|
||||
|
||||
self.source_dir = source_dir
|
||||
self.build_dir = build_dir
|
||||
self.config_dir = config_dir
|
||||
self.match = match
|
||||
|
||||
self.projects = {}
|
||||
self.default_project = None
|
||||
self.domain_by_extension = {}
|
||||
self.domain_by_file_pattern = {}
|
||||
|
||||
self.project_count = 0
|
||||
self.project_info_store = {}
|
||||
self.project_info_for_auto_store = {}
|
||||
self.auto_project_info_store = {}
|
||||
|
||||
def update(
|
||||
self,
|
||||
projects,
|
||||
default_project,
|
||||
domain_by_extension,
|
||||
domain_by_file_pattern,
|
||||
projects_source,
|
||||
build_dir
|
||||
):
|
||||
|
||||
self.projects = projects
|
||||
self.default_project = default_project
|
||||
self.domain_by_extension = domain_by_extension
|
||||
self.domain_by_file_pattern = domain_by_file_pattern
|
||||
self.projects_source = projects_source
|
||||
|
||||
# If the breathe config values has a non-empty value for build_dir then use that otherwise
|
||||
# stick with the default
|
||||
if build_dir:
|
||||
self.build_dir = build_dir
|
||||
|
||||
def default_path(self):
|
||||
|
||||
if not self.default_project:
|
||||
raise NoDefaultProjectError(
|
||||
"No breathe_default_project config setting to fall back on "
|
||||
"for directive with no 'project' or 'path' specified."
|
||||
)
|
||||
|
||||
try:
|
||||
return self.projects[self.default_project]
|
||||
except KeyError:
|
||||
raise ProjectError(
|
||||
("breathe_default_project value '%s' does not seem to be a valid key for the "
|
||||
"breathe_projects dictionary") % self.default_project
|
||||
)
|
||||
|
||||
def create_project_info(self, options):
|
||||
|
||||
name = ""
|
||||
|
||||
if "project" in options:
|
||||
try:
|
||||
path = self.projects[options["project"]]
|
||||
name = options["project"]
|
||||
except KeyError:
|
||||
raise ProjectError("Unable to find project '%s' in breathe_projects dictionary"
|
||||
% options["project"])
|
||||
|
||||
elif "path" in options:
|
||||
path = options["path"]
|
||||
|
||||
else:
|
||||
path = self.default_path()
|
||||
|
||||
try:
|
||||
return self.project_info_store[path]
|
||||
except KeyError:
|
||||
|
||||
reference = name
|
||||
|
||||
if not name:
|
||||
name = "project%s" % self.project_count
|
||||
reference = path
|
||||
self.project_count += 1
|
||||
|
||||
project_info = ProjectInfo(
|
||||
name,
|
||||
path,
|
||||
"NoSourcePath",
|
||||
reference,
|
||||
self.source_dir,
|
||||
self.config_dir,
|
||||
self.domain_by_extension,
|
||||
self.domain_by_file_pattern,
|
||||
self.match
|
||||
)
|
||||
|
||||
self.project_info_store[path] = project_info
|
||||
|
||||
return project_info
|
||||
|
||||
def store_project_info_for_auto(self, name, project_info):
|
||||
"""Stores the project info by name for later extraction by the auto directives.
|
||||
|
||||
Stored separately to the non-auto project info objects as they should never overlap.
|
||||
"""
|
||||
|
||||
self.project_info_for_auto_store[name] = project_info
|
||||
|
||||
def retrieve_project_info_for_auto(self, options):
|
||||
"""Retrieves the project info by name for later extraction by the auto directives.
|
||||
|
||||
Looks for the 'project' entry in the options dictionary. This is a less than ideal API but
|
||||
it is designed to match the use of 'create_project_info' above for which it makes much more
|
||||
sense.
|
||||
"""
|
||||
|
||||
name = options.get('project', self.default_project)
|
||||
|
||||
if name is None:
|
||||
raise NoDefaultProjectError(
|
||||
"No breathe_default_project config setting to fall back on "
|
||||
"for directive with no 'project' or 'path' specified."
|
||||
)
|
||||
|
||||
return self.project_info_for_auto_store[name]
|
||||
|
||||
def create_auto_project_info(self, name, source_path):
|
||||
|
||||
key = source_path
|
||||
|
||||
try:
|
||||
return self.auto_project_info_store[key]
|
||||
except KeyError:
|
||||
|
||||
reference = name
|
||||
|
||||
if not name:
|
||||
name = "project%s" % self.project_count
|
||||
reference = source_path
|
||||
self.project_count += 1
|
||||
|
||||
auto_project_info = AutoProjectInfo(
|
||||
name,
|
||||
source_path,
|
||||
self.build_dir,
|
||||
reference,
|
||||
self.source_dir,
|
||||
self.config_dir,
|
||||
self.domain_by_extension,
|
||||
self.domain_by_file_pattern,
|
||||
self.match
|
||||
)
|
||||
|
||||
self.auto_project_info_store[key] = auto_project_info
|
||||
|
||||
return auto_project_info
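# Isolated sketch of the domain_for_file() lookup implemented by ProjectInfo
# above: extension mapping first, then file patterns override. Using
# fnmatch.fnmatch as the match callable is an assumption for illustration;
# the real callable is injected through the factory.
from fnmatch import fnmatch

def resolve_domain(file_, domain_by_extension, domain_by_file_pattern, match=fnmatch):
    domain = domain_by_extension.get(file_.split(".")[-1], "")
    for pattern, pattern_domain in domain_by_file_pattern.items():
        if match(file_, pattern):
            domain = pattern_domain
    return domain

print(resolve_domain("util.h", {"h": "cpp"}, {"*.c": "c"}))   # -> "cpp"
print(resolve_domain("main.c", {"h": "cpp"}, {"*.c": "c"}))   # -> "c"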
|
Binary file not shown.
Binary file not shown.
@ -1,2 +0,0 @@
|
||||
|
||||
|
Binary file not shown.
@ -1,383 +0,0 @@
|
||||
|
||||
from .base import Renderer, RenderContext
|
||||
from . import index as indexrenderer
|
||||
from . import compound as compoundrenderer
|
||||
|
||||
from docutils import nodes
|
||||
import textwrap
|
||||
|
||||
class RstContentCreator(object):
|
||||
|
||||
def __init__(self, list_type, dedent):
|
||||
|
||||
self.list_type = list_type
|
||||
self.dedent = dedent
|
||||
|
||||
def __call__(self, text):
|
||||
|
||||
# Remove the first line which is "embed:rst[:leading-asterisk]"
|
||||
text = "\n".join(text.split(u"\n")[1:])
|
||||
|
||||
# Remove starting whitespace
|
||||
text = self.dedent(text)
|
||||
|
||||
# Inspired by autodoc.py in Sphinx
|
||||
result = self.list_type()
|
||||
for line in text.split("\n"):
|
||||
result.append(line, "<breathe>")
|
||||
|
||||
return result
|
||||
|
||||
class UnicodeRenderer(Renderer):
|
||||
|
||||
def render(self):
|
||||
|
||||
# Skip any nodes that are pure whitespace
|
||||
# Probably need a better way to do this as currently we're only doing
|
||||
# it skip whitespace between higher-level nodes, but this will also
|
||||
# skip any pure whitespace entries in actual content nodes
|
||||
#
|
||||
# We counter that second issue slightly by allowing through single white spaces
|
||||
#
|
||||
if self.data_object.strip():
|
||||
return [self.node_factory.Text(self.data_object)]
|
||||
elif self.data_object == unicode(" "):
|
||||
return [self.node_factory.Text(self.data_object)]
|
||||
else:
|
||||
return []
|
||||
|
||||
class NullRenderer(Renderer):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def render(self):
|
||||
return []
|
||||
|
||||
|
||||
class DoxygenToRstRendererFactory(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
node_type,
|
||||
renderers,
|
||||
renderer_factory_creator,
|
||||
node_factory,
|
||||
project_info,
|
||||
state,
|
||||
document,
|
||||
rst_content_creator,
|
||||
filter_,
|
||||
target_handler,
|
||||
domain_directive_factory
|
||||
):
|
||||
|
||||
self.node_type = node_type
|
||||
self.node_factory = node_factory
|
||||
self.project_info = project_info
|
||||
self.renderers = renderers
|
||||
self.renderer_factory_creator = renderer_factory_creator
|
||||
self.state = state
|
||||
self.document = document
|
||||
self.rst_content_creator = rst_content_creator
|
||||
self.filter_ = filter_
|
||||
self.target_handler = target_handler
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
|
||||
def create_renderer(
|
||||
self,
|
||||
context
|
||||
):
|
||||
|
||||
parent_data_object = context.node_stack[1]
|
||||
data_object = context.node_stack[0]
|
||||
|
||||
if not self.filter_.allow(context.node_stack):
|
||||
return NullRenderer()
|
||||
|
||||
child_renderer_factory = self.renderer_factory_creator.create_child_factory(
|
||||
self.project_info,
|
||||
data_object,
|
||||
self
|
||||
)
|
||||
|
||||
try:
|
||||
node_type = data_object.node_type
|
||||
except AttributeError as e:
|
||||
|
||||
# Horrible hack to silence errors on filtering unicode objects
|
||||
# until we fix the parsing
|
||||
if type(data_object) == unicode:
|
||||
node_type = "unicode"
|
||||
else:
|
||||
raise e
|
||||
|
||||
Renderer = self.renderers[node_type]
|
||||
|
||||
common_args = [
|
||||
self.project_info,
|
||||
context,
|
||||
child_renderer_factory,
|
||||
self.node_factory,
|
||||
self.state,
|
||||
self.document,
|
||||
self.target_handler,
|
||||
self.domain_directive_factory
|
||||
]
|
||||
|
||||
if node_type == "docmarkup":
|
||||
|
||||
creator = self.node_factory.inline
|
||||
if data_object.type_ == "emphasis":
|
||||
creator = self.node_factory.emphasis
|
||||
elif data_object.type_ == "computeroutput":
|
||||
creator = self.node_factory.literal
|
||||
elif data_object.type_ == "bold":
|
||||
creator = self.node_factory.strong
|
||||
elif data_object.type_ == "superscript":
|
||||
creator = self.node_factory.superscript
|
||||
elif data_object.type_ == "subscript":
|
||||
creator = self.node_factory.subscript
|
||||
elif data_object.type_ == "center":
|
||||
print("Warning: does not currently handle 'center' text display")
|
||||
elif data_object.type_ == "small":
|
||||
print("Warning: does not currently handle 'small' text display")
|
||||
|
||||
return Renderer(
|
||||
creator,
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "verbatim":
|
||||
|
||||
return Renderer(
|
||||
self.rst_content_creator,
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "compound":
|
||||
|
||||
kind = data_object.kind
|
||||
if kind in ["file", "dir", "page", "example", "group"]:
|
||||
return Renderer(indexrenderer.FileRenderer, *common_args)
|
||||
|
||||
class_ = indexrenderer.CompoundTypeSubRenderer
|
||||
|
||||
# For compound node types Renderer is CreateCompoundTypeSubRenderer
|
||||
# as defined below. This could be cleaner
|
||||
return Renderer(
|
||||
class_,
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "memberdef":
|
||||
|
||||
if data_object.kind in ("function", "slot") or (data_object.kind == 'friend' and data_object.argsstring):
|
||||
Renderer = compoundrenderer.FuncMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "enum":
|
||||
Renderer = compoundrenderer.EnumMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "typedef":
|
||||
Renderer = compoundrenderer.TypedefMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "variable":
|
||||
Renderer = compoundrenderer.VariableMemberDefTypeSubRenderer
|
||||
elif data_object.kind == "define":
|
||||
Renderer = compoundrenderer.DefineMemberDefTypeSubRenderer
|
||||
|
||||
if node_type == "param":
|
||||
return Renderer(
|
||||
parent_data_object.node_type != "templateparamlist",
|
||||
*common_args
|
||||
)
|
||||
|
||||
if node_type == "docsimplesect":
|
||||
if data_object.kind == "par":
|
||||
Renderer = compoundrenderer.ParDocSimpleSectTypeSubRenderer
|
||||
|
||||
return Renderer(
|
||||
*common_args
|
||||
)
|
||||
|
||||
class CreateCompoundTypeSubRenderer(object):
|
||||
|
||||
def __init__(self, parser_factory):
|
||||
|
||||
self.parser_factory = parser_factory
|
||||
|
||||
def __call__(self, class_, project_info, *args):
|
||||
|
||||
compound_parser = self.parser_factory.create_compound_parser(project_info)
|
||||
return class_(compound_parser, project_info, *args)
|
||||
|
||||
|
||||
class CreateRefTypeSubRenderer(object):
|
||||
|
||||
def __init__(self, parser_factory):
|
||||
|
||||
self.parser_factory = parser_factory
|
||||
|
||||
def __call__(self, project_info, *args):
|
||||
|
||||
compound_parser = self.parser_factory.create_compound_parser(project_info)
|
||||
return compoundrenderer.RefTypeSubRenderer(compound_parser, project_info, *args)
|
||||
|
||||
|
||||
class DoxygenToRstRendererFactoryCreator(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
node_factory,
|
||||
parser_factory,
|
||||
domain_directive_factory,
|
||||
rst_content_creator,
|
||||
project_info
|
||||
):
|
||||
|
||||
self.node_factory = node_factory
|
||||
self.parser_factory = parser_factory
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
self.rst_content_creator = rst_content_creator
|
||||
self.project_info = project_info
|
||||
|
||||
def create_factory(self, node_stack, state, document, filter_, target_handler):
|
||||
|
||||
data_object = node_stack[0]
|
||||
|
||||
renderers = {
|
||||
"doxygen" : indexrenderer.DoxygenTypeSubRenderer,
|
||||
"compound" : CreateCompoundTypeSubRenderer(self.parser_factory),
|
||||
"doxygendef" : compoundrenderer.DoxygenTypeSubRenderer,
|
||||
"compounddef" : compoundrenderer.CompoundDefTypeSubRenderer,
|
||||
"sectiondef" : compoundrenderer.SectionDefTypeSubRenderer,
|
||||
"memberdef" : compoundrenderer.MemberDefTypeSubRenderer,
|
||||
"enumvalue" : compoundrenderer.EnumvalueTypeSubRenderer,
|
||||
"linkedtext" : compoundrenderer.LinkedTextTypeSubRenderer,
|
||||
"description" : compoundrenderer.DescriptionTypeSubRenderer,
|
||||
"param" : compoundrenderer.ParamTypeSubRenderer,
|
||||
"docreftext" : compoundrenderer.DocRefTextTypeSubRenderer,
|
||||
"docheading" : compoundrenderer.DocHeadingTypeSubRenderer,
|
||||
"docpara" : compoundrenderer.DocParaTypeSubRenderer,
|
||||
"docmarkup" : compoundrenderer.DocMarkupTypeSubRenderer,
|
||||
"docparamlist" : compoundrenderer.DocParamListTypeSubRenderer,
|
||||
"docparamlistitem" : compoundrenderer.DocParamListItemSubRenderer,
|
||||
"docparamnamelist" : compoundrenderer.DocParamNameListSubRenderer,
|
||||
"docparamname" : compoundrenderer.DocParamNameSubRenderer,
|
||||
"docsect1" : compoundrenderer.DocSect1TypeSubRenderer,
|
||||
"docsimplesect" : compoundrenderer.DocSimpleSectTypeSubRenderer,
|
||||
"doctitle" : compoundrenderer.DocTitleTypeSubRenderer,
|
||||
"docformula" : compoundrenderer.DocForumlaTypeSubRenderer,
|
||||
"docimage" : compoundrenderer.DocImageTypeSubRenderer,
|
||||
"docurllink" : compoundrenderer.DocURLLinkSubRenderer,
|
||||
"listing" : compoundrenderer.ListingTypeSubRenderer,
|
||||
"codeline" : compoundrenderer.CodeLineTypeSubRenderer,
|
||||
"highlight" : compoundrenderer.HighlightTypeSubRenderer,
|
||||
"templateparamlist" : compoundrenderer.TemplateParamListRenderer,
|
||||
"inc" : compoundrenderer.IncTypeSubRenderer,
|
||||
"ref" : CreateRefTypeSubRenderer(self.parser_factory),
|
||||
"verbatim" : compoundrenderer.VerbatimTypeSubRenderer,
|
||||
"mixedcontainer" : compoundrenderer.MixedContainerRenderer,
|
||||
"unicode" : UnicodeRenderer,
|
||||
"doclist": compoundrenderer.DocListTypeSubRenderer,
|
||||
"doclistitem": compoundrenderer.DocListItemTypeSubRenderer,
|
||||
}
|
||||
|
||||
try:
|
||||
node_type = data_object.node_type
|
||||
except AttributeError as e:
|
||||
|
||||
# Horrible hack to silence errors on filtering unicode objects
|
||||
# until we fix the parsing
|
||||
if type(data_object) == unicode:
|
||||
node_type = "unicode"
|
||||
else:
|
||||
raise e
|
||||
|
||||
return DoxygenToRstRendererFactory(
|
||||
"root",
|
||||
renderers,
|
||||
self,
|
||||
self.node_factory,
|
||||
self.project_info,
|
||||
state,
|
||||
document,
|
||||
self.rst_content_creator,
|
||||
filter_,
|
||||
target_handler,
|
||||
self.domain_directive_factory
|
||||
)
|
||||
|
||||
def create_child_factory( self, project_info, data_object, parent_renderer_factory ):
|
||||
|
||||
try:
|
||||
node_type = data_object.node_type
|
||||
except AttributeError as e:
|
||||
|
||||
# Horrible hack to silence errors on filtering unicode objects
|
||||
# until we fix the parsing
|
||||
if type(data_object) == unicode:
|
||||
node_type = "unicode"
|
||||
else:
|
||||
raise e
|
||||
|
||||
return DoxygenToRstRendererFactory(
|
||||
node_type,
|
||||
parent_renderer_factory.renderers,
|
||||
self,
|
||||
self.node_factory,
|
||||
parent_renderer_factory.project_info,
|
||||
parent_renderer_factory.state,
|
||||
parent_renderer_factory.document,
|
||||
self.rst_content_creator,
|
||||
parent_renderer_factory.filter_,
|
||||
parent_renderer_factory.target_handler,
|
||||
parent_renderer_factory.domain_directive_factory
|
||||
)
|
||||
|
||||
|
||||
# FactoryFactoryFactory. Ridiculous but necessary.
|
||||
class DoxygenToRstRendererFactoryCreatorConstructor(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
node_factory,
|
||||
parser_factory,
|
||||
domain_directive_factory,
|
||||
rst_content_creator
|
||||
):
|
||||
|
||||
self.node_factory = node_factory
|
||||
self.parser_factory = parser_factory
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
self.rst_content_creator = rst_content_creator
|
||||
|
||||
def create_factory_creator(self, project_info, document, options, target_handler):
|
||||
|
||||
return DoxygenToRstRendererFactoryCreator(
|
||||
self.node_factory,
|
||||
self.parser_factory,
|
||||
self.domain_directive_factory,
|
||||
self.rst_content_creator,
|
||||
project_info,
|
||||
)
|
||||
|
||||
|
||||
def format_parser_error(name, error, filename, state, lineno, do_unicode_warning):
|
||||
|
||||
warning = '%s: Unable to parse xml file "%s". ' % (name, filename)
|
||||
explanation = 'Reported error: %s. ' % error
|
||||
|
||||
unicode_explanation_text = ""
|
||||
unicode_explanation = []
|
||||
if do_unicode_warning:
|
||||
unicode_explanation_text = textwrap.dedent("""
|
||||
Parsing errors are often due to unicode errors associated with the encoding of the original
|
||||
source files. Doxygen propagates invalid characters from the input source files to the
|
||||
output xml.""").strip().replace("\n", " ")
|
||||
unicode_explanation = [nodes.paragraph("", "", nodes.Text(unicode_explanation_text))]
|
||||
|
||||
return [nodes.warning("",
|
||||
nodes.paragraph("", "", nodes.Text(warning)),
|
||||
nodes.paragraph("", "", nodes.Text(explanation)),
|
||||
*unicode_explanation
|
||||
),
|
||||
state.document.reporter.warning(warning + explanation + unicode_explanation_text, line=lineno)
|
||||
]
|
Binary file not shown.
@ -1,127 +0,0 @@
|
||||
|
||||
class Renderer(object):
|
||||
|
||||
def __init__(self,
|
||||
project_info,
|
||||
context,
|
||||
renderer_factory,
|
||||
node_factory,
|
||||
state,
|
||||
document,
|
||||
target_handler,
|
||||
domain_directive_factory,
|
||||
):
|
||||
|
||||
self.project_info = project_info
|
||||
self.context = context
|
||||
self.data_object = context.node_stack[0]
|
||||
self.renderer_factory = renderer_factory
|
||||
self.node_factory = node_factory
|
||||
self.state = state
|
||||
self.document = document
|
||||
self.target_handler = target_handler
|
||||
self.domain_directive_factory = domain_directive_factory
|
||||
|
||||
if self.context.domain == '':
|
||||
self.context.domain = self.get_domain()
|
||||
|
||||
def get_domain(self):
|
||||
"""Returns the domain for the current node."""
|
||||
|
||||
def get_filename(node):
|
||||
"""Returns the name of a file where the declaration represented by node is located."""
|
||||
try:
|
||||
return node.location.file
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
node_stack = self.context.node_stack
|
||||
node = node_stack[0]
|
||||
# An enumvalue node doesn't have location, so use its parent node for detecting the domain instead.
|
||||
if type(node) == unicode or node.node_type == "enumvalue":
|
||||
node = node_stack[1]
|
||||
filename = get_filename(node)
|
||||
if not filename and node.node_type == "compound":
|
||||
file_data = self.compound_parser.parse(node.refid)
|
||||
filename = get_filename(file_data.compounddef)
|
||||
return self.project_info.domain_for_file(filename) if filename else ''
|
||||
|
||||
def get_fully_qualified_name(self):
|
||||
|
||||
names = []
|
||||
node_stack = self.context.node_stack
|
||||
node = node_stack[0]
|
||||
if node.node_type == 'enumvalue':
|
||||
names.append(node.name)
|
||||
# Skip the name of the containing enum because it is not a part of the fully qualified name.
|
||||
node_stack = node_stack[2:]
|
||||
|
||||
# If the node is a namespace, use its name because namespaces are skipped in the main loop.
|
||||
if node.node_type == 'compound' and node.kind == 'namespace':
|
||||
names.append(node.name)
|
||||
|
||||
for node in node_stack:
|
||||
if node.node_type == 'ref' and len(names) == 0:
|
||||
return node.valueOf_
|
||||
if (node.node_type == 'compound' and node.kind not in ['file', 'namespace']) or \
|
||||
node.node_type == 'memberdef':
|
||||
# We skip the 'file' entries because the file name doesn't form part of the
|
||||
# qualified name for the identifier. We skip the 'namespace' entries because if we
|
||||
# find an object through the namespace 'compound' entry in the index.xml then we'll
|
||||
# also have the 'compounddef' entry in our node stack and we'll get it from that. We
|
||||
# need the 'compounddef' entry because if we find the object through the 'file'
|
||||
# entry in the index.xml file then we need to get the namespace name from somewhere
|
||||
names.insert(0, node.name)
|
||||
if (node.node_type == 'compounddef' and node.kind == 'namespace'):
|
||||
# Nested namespaces include their parent namespace(s) in compoundname. ie,
|
||||
# compoundname is 'foo::bar' instead of just 'bar' for namespace 'bar' nested in
|
||||
# namespace 'foo'. We need full compoundname because node_stack doesn't necessarily
|
||||
# include parent namespaces and we stop here in case it does.
|
||||
names.insert(0, node.compoundname)
|
||||
break
|
||||
|
||||
return '::'.join(names)
|
||||
|
||||
def create_template_node(self, decl):
|
||||
"""Creates a node for the ``template <...>`` part of the declaration."""
|
||||
if not decl.templateparamlist:
|
||||
return None
|
||||
context = self.context.create_child_context(decl.templateparamlist)
|
||||
renderer = self.renderer_factory.create_renderer(context)
|
||||
nodes = [self.node_factory.Text("template <")]
|
||||
nodes.extend(renderer.render())
|
||||
nodes.append(self.node_factory.Text(">"))
|
||||
signode = self.node_factory.desc_signature()
|
||||
signode.extend(nodes)
|
||||
return signode
|
||||
|
||||
def run_domain_directive(self, kind, names):
|
||||
domain_directive = self.renderer_factory.domain_directive_factory.create(
|
||||
self.context.domain, [kind, names] + self.context.directive_args[2:])
|
||||
|
||||
# Translate Breathe's no-link option into the standard noindex option.
|
||||
if 'no-link' in self.context.directive_args[2]:
|
||||
domain_directive.options['noindex'] = True
|
||||
nodes = domain_directive.run()
|
||||
|
||||
# Filter out outer class names if we are rendering a member as a part of a class content.
|
||||
signode = nodes[1].children[0]
|
||||
if len(names) > 0 and self.context.child:
|
||||
signode.children = [n for n in signode.children if not n.tagname == 'desc_addname']
|
||||
return nodes
|
||||
|
||||
|
||||
class RenderContext(object):
|
||||
|
||||
def __init__(self, node_stack, mask_factory, directive_args, domain='', child=False):
|
||||
self.node_stack = node_stack
|
||||
self.mask_factory = mask_factory
|
||||
self.directive_args = directive_args
|
||||
self.domain = domain
|
||||
self.child = child
|
||||
|
||||
def create_child_context(self, data_object):
|
||||
|
||||
node_stack = self.node_stack[:]
|
||||
node_stack.insert(0, self.mask_factory.mask(data_object))
|
||||
return RenderContext(node_stack, self.mask_factory, self.directive_args, self.domain, True)
|
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
@ -1,115 +0,0 @@
|
||||
|
||||
from .base import Renderer
|
||||
|
||||
class DoxygenTypeSubRenderer(Renderer):
|
||||
|
||||
def render(self):
|
||||
|
||||
nodelist = []
|
||||
|
||||
# Process all the compound children
|
||||
for compound in self.data_object.get_compound():
|
||||
context = self.context.create_child_context(compound)
|
||||
compound_renderer = self.renderer_factory.create_renderer(context)
|
||||
nodelist.extend(compound_renderer.render())
|
||||
|
||||
return nodelist
|
||||
|
||||
|
||||
class CompoundRenderer(Renderer):
|
||||
"""Base class for CompoundTypeSubRenderer and RefTypeSubRenderer."""
|
||||
|
||||
def __init__(self, compound_parser, render_empty_node, *args):
|
||||
self.compound_parser = compound_parser
|
||||
self.render_empty_node = render_empty_node
|
||||
Renderer.__init__(self, *args)
|
||||
|
||||
def create_doxygen_target(self):
|
||||
"""Can be overridden to create a target node which uses the doxygen refid information
|
||||
which can be used for creating links between internal doxygen elements.
|
||||
|
||||
The default implementation should suffice most of the time.
|
||||
"""
|
||||
|
||||
refid = "%s%s" % (self.project_info.name(), self.data_object.refid)
|
||||
return self.target_handler.create_target(refid)
|
||||
|
||||
def render_signature(self, file_data, doxygen_target):
|
||||
# Defer to domains specific directive.
|
||||
name, kind = self.get_node_info(file_data)
|
||||
self.context.directive_args[1] = [self.get_fully_qualified_name()]
|
||||
nodes = self.run_domain_directive(kind, self.context.directive_args[1])
|
||||
node = nodes[1]
|
||||
signode, contentnode = node.children
|
||||
|
||||
# The cpp domain in Sphinx doesn't support structs at the moment, so change the text from "class "
|
||||
# to the correct kind which can be "class " or "struct ".
|
||||
signode[0] = self.node_factory.desc_annotation(kind + ' ', kind + ' ')
|
||||
|
||||
# Check if there is template information and format it as desired
|
||||
template_signode = self.create_template_node(file_data.compounddef)
|
||||
if template_signode:
|
||||
node.insert(0, template_signode)
|
||||
node.children[0].insert(0, doxygen_target)
|
||||
return nodes, contentnode
|
||||
|
||||
def render(self):
|
||||
|
||||
# Read in the corresponding xml file and process
|
||||
file_data = self.compound_parser.parse(self.data_object.refid)
|
||||
|
||||
parent_context = self.context.create_child_context(file_data)
|
||||
data_renderer = self.renderer_factory.create_renderer(parent_context)
|
||||
rendered_data = data_renderer.render()
|
||||
|
||||
if not rendered_data and not self.render_empty_node:
|
||||
return []
|
||||
|
||||
file_data = parent_context.node_stack[0]
|
||||
new_context = parent_context.create_child_context(file_data.compounddef)
|
||||
|
||||
nodes, contentnode = self.render_signature(file_data, self.create_doxygen_target())
|
||||
|
||||
if file_data.compounddef.includes:
|
||||
for include in file_data.compounddef.includes:
|
||||
context = new_context.create_child_context(include)
|
||||
renderer = self.renderer_factory.create_renderer(context)
|
||||
contentnode.extend(renderer.render())
|
||||
|
||||
contentnode.extend(rendered_data)
|
||||
return nodes
|
||||
|
||||
|
||||
class CompoundTypeSubRenderer(CompoundRenderer):
|
||||
|
||||
def __init__(self, compound_parser, *args):
|
||||
CompoundRenderer.__init__(self, compound_parser, True, *args)
|
||||
|
||||
def get_node_info(self, file_data):
|
||||
return self.data_object.name, self.data_object.kind
|
||||
|
||||
|
||||
class FileRenderer(CompoundTypeSubRenderer):
|
||||
|
||||
def render_signature(self, file_data, doxygen_target):
|
||||
# Build targets for linking
|
||||
targets = []
|
||||
targets.extend(doxygen_target)
|
||||
|
||||
title_signode = self.node_factory.desc_signature()
|
||||
title_signode.extend(targets)
|
||||
|
||||
# Set up the title
|
||||
name, kind = self.get_node_info(file_data)
|
||||
title_signode.append(self.node_factory.emphasis(text=kind))
|
||||
title_signode.append(self.node_factory.Text(" "))
|
||||
title_signode.append(self.node_factory.desc_name(text=name))
|
||||
|
||||
contentnode = self.node_factory.desc_content()
|
||||
|
||||
node = self.node_factory.desc()
|
||||
node.document = self.state.document
|
||||
node['objtype'] = kind
|
||||
node.append(title_signode)
|
||||
node.append(contentnode)
|
||||
return [node], contentnode
|
Binary file not shown.
@ -1,62 +0,0 @@
|
||||
"""
|
||||
Masks
|
||||
=====
|
||||
|
||||
Masks are related to filters. Filters can block the processing of particular parts of the xml
|
||||
hierarchy but they can only work on node level. If the part of the xml hierarchy that you want to
|
||||
filter out is read in as an instance of one of the classes in parser/doxygen/*.py then you can use
|
||||
the filters. However, if you want to filter out an attribute from one of the nodes (and some of the
|
||||
xml child nodes are read in as attributes on their parents) then you can't use a filter.
|
||||
|
||||
We introduce the Mask's to fulfil this need. The masks are designed to be applied to a particular
|
||||
node type and to limit the access to particular attributes on the node. For example, then
|
||||
NoParameterNamesMask wraps a node a returns all its standard attributes but returns None for the
|
||||
'declname' and 'defname' attributes.
|
||||
|
||||
Currently the Mask functionality is only used for the text signature rendering for doing function
|
||||
matching.
|
||||
|
||||
"""
|
||||
|
||||
class NoParameterNamesMask(object):
|
||||
|
||||
def __init__(self, data_object):
|
||||
self.data_object = data_object
|
||||
|
||||
def __getattr__(self, attr):
|
||||
|
||||
if attr in ['declname', 'defname', 'defval']:
|
||||
return None
|
||||
|
||||
return getattr(self.data_object, attr)
|
||||
|
||||
class MaskFactory(object):
|
||||
|
||||
def __init__(self, lookup):
|
||||
self.lookup = lookup
|
||||
|
||||
def mask(self, data_object):
|
||||
|
||||
try:
|
||||
node_type = data_object.node_type
|
||||
except AttributeError as e:
|
||||
|
||||
# Horrible hack to silence errors on filtering unicode objects
|
||||
# until we fix the parsing
|
||||
if type(data_object) == unicode:
|
||||
node_type = "unicode"
|
||||
else:
|
||||
raise e
|
||||
|
||||
if node_type in self.lookup:
|
||||
Mask = self.lookup[node_type]
|
||||
return Mask(data_object)
|
||||
|
||||
return data_object
|
||||
|
||||
|
||||
class NullMaskFactory(object):
|
||||
|
||||
def mask(self, data_object):
|
||||
return data_object
|
||||
|
Binary file not shown.
@ -1,40 +0,0 @@
|
||||
|
||||
class TargetHandler(object):
|
||||
|
||||
def __init__(self, project_info, node_factory, document):
|
||||
|
||||
self.project_info = project_info
|
||||
self.node_factory = node_factory
|
||||
self.document = document
|
||||
|
||||
def create_target(self, id_):
|
||||
"""Creates a target node and registers it with the document and returns it in a list"""
|
||||
|
||||
target = self.node_factory.target(ids=[id_], names=[id_])
|
||||
|
||||
try:
|
||||
self.document.note_explicit_target(target)
|
||||
except Exception:
|
||||
# TODO: We should really return a docutils warning node here
|
||||
print("Warning: Duplicate target detected: %s" % id_)
|
||||
|
||||
return [target]
|
||||
|
||||
class NullTargetHandler(object):
|
||||
|
||||
def create_target(self, refid):
|
||||
return []
|
||||
|
||||
class TargetHandlerFactory(object):
|
||||
|
||||
def __init__(self, node_factory):
|
||||
|
||||
self.node_factory = node_factory
|
||||
|
||||
def create_target_handler(self, options, project_info, document):
|
||||
|
||||
if options.has_key("no-link"):
|
||||
return NullTargetHandler()
|
||||
|
||||
return TargetHandler(project_info, self.node_factory, document)
|
||||
|
Binary file not shown.
@ -36,7 +36,7 @@ extensions = [
|
||||
'sphinx.ext.pngmath',
|
||||
'sphinx.ext.ifconfig',
|
||||
'sphinx.ext.viewcode',
|
||||
'breathe',
|
||||
# 'breathe',
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
@ -51,7 +51,7 @@ source_suffix = '.rst'
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
master_doc = 'source/index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'OBITools3'
|
||||
@ -292,7 +292,7 @@ texinfo_documents = [
|
||||
#texinfo_no_detailmenu = False
|
||||
|
||||
#Breathe configuration
|
||||
sys.path.append( "../breathe/" )
|
||||
breathe_projects = { "OBITools3": "../doxygen/xml/" }
|
||||
sys.path.append( "breathe/" )
|
||||
breathe_projects = { "OBITools3": "doxygen/xml/" }
|
||||
breathe_default_project = "OBITools3"
|
||||
|
4
doc/doxygen/xml/.gitignore
vendored
@ -1,4 +0,0 @@
|
||||
# Ignore everything in this directory
|
||||
*
|
||||
# Except this file
|
||||
!.gitignore
|
@ -2,19 +2,19 @@
|
||||
The OBItools3 Data Management System (OBIDMS)
|
||||
*********************************************
|
||||
|
||||
A complete DNA Metabarcoding experiment rely on several kinds of data.
|
||||
A complete DNA metabarcoding experiment relies on several kinds of data.
|
||||
|
||||
- The sequence data resulting of the PCR products sequencing,
|
||||
- The sequence data resulting from the sequencing of the PCR products,
|
||||
- The description of the samples including all their metadata,
|
||||
- One or several refence database used for the taxonomical annotation
|
||||
- One or several taxonomies.
|
||||
- One or several reference databases used for the taxonomic annotation,
|
||||
- One or several taxonomy databases.
|
||||
|
||||
Up to now each of these categories of data were stored in separate
|
||||
files an nothing obliged to keep them together.
|
||||
Up to now, each of these categories of data were stored in separate
|
||||
files, and nothing made it mandatory to keep them together.
|
||||
|
||||
|
||||
The `Data Management System` (DMS) of OBITools3 can be considered
|
||||
as a basic database system.
|
||||
The `Data Management System` (DMS) of OBITools3 can be viewed as a basic
|
||||
database system.
|
||||
|
||||
|
||||
OBIDMS UML
|
||||
@ -25,11 +25,21 @@ OBIDMS UML
|
||||
:download:`html version of the OBIDMS UML file <UML/ObiDMS_UML.class.violet.html>`
|
||||
|
||||
|
||||
An OBIDMS directory consists of :
|
||||
* OBIDMS column files
|
||||
* OBIDMS release files
|
||||
* OBIDMS dictionary files
|
||||
* one OBIDMS history file
|
||||
An OBIDMS directory contains :
|
||||
* one `OBIDMS history file <#obidms-history-files>`_
|
||||
* OBIDMS column directories
|
||||
|
||||
|
||||
OBIDMS column directories
|
||||
=========================
|
||||
|
||||
OBIDMS column directories contain :
|
||||
* all the different versions of one OBIDMS column, in the form of separate files (`OBIDMS column files <#obidms-column-files>`_)
|
||||
* one `OBIDMS version file <#obidms-version-files>`_
|
||||
|
||||
The directory name is the column attribute with the extension ``.obicol``.
|
||||
|
||||
Example: ``count.obicol``
|
||||
|
||||
|
||||
OBIDMS column files
|
||||
@ -38,7 +48,7 @@ OBIDMS column files
|
||||
Each OBIDMS column file contains :
|
||||
* a header of a size equal to a multiple of PAGESIZE (PAGESIZE being equal to 4096 bytes
|
||||
on most systems) containing metadata
|
||||
* one column of data with the same OBIType
|
||||
* Lines of data with the same `OBIType <types.html#obitypes>`_
|
||||
|
||||
|
||||
Header
|
||||
@ -48,27 +58,33 @@ The header of an OBIDMS column contains :
|
||||
|
||||
* Endian byte order
|
||||
* Header size (PAGESIZE multiple)
|
||||
*
|
||||
* File status : Open/Closed
|
||||
* Owner : PID of the process that created the file and is the only one allowed to modify it if it is open
|
||||
* Number of lines (total or without the header?)
|
||||
* OBIType
|
||||
* Date of creation
|
||||
* Version of the file
|
||||
* Number of lines of data
|
||||
* Number of lines of data used
|
||||
* `OBIType <types.html#obitypes>`_ (type of the data)
|
||||
* Date of creation of the file
|
||||
* Version of the OBIDMS column
|
||||
* The column name
|
||||
* Optional comments
|
||||
|
||||
|
||||
Data
|
||||
----
|
||||
|
||||
A column of data with the same OBIType.
|
||||
A line of data corresponds to a vector of elements. Each element is associated with an element name.
|
||||
Element names are stored in the header. The correspondence between an element and its name is established
|
||||
through their order in the lists of elements and element names. This structure allows the storage of
|
||||
dictionary-like data.
|
||||
|
||||
Example: In the header, the attribute ``elements_names`` will be associated with the value ``"sample_1;
|
||||
sample_2;sample_3"``, and a line of data with the type ``OBInt_t`` will be stored as an ``OBInt_t`` vector
|
||||
of size three e.g. ``5|8|4``.
|
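
The correspondence can be pictured with a small Python sketch (illustrative only, not part of the
OBITools3 sources; the names and values are those of the example above)::

    elements_names = "sample_1;sample_2;sample_3".split(";")
    line = [5, 8, 4]                            # one OBIInt_t vector, i.e. one line of data
    values = dict(zip(elements_names, line))    # {'sample_1': 5, 'sample_2': 8, 'sample_3': 4}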
||||
|
||||
|
||||
Mandatory columns
|
||||
-----------------
|
||||
|
||||
Some columns must exist in an OBIDMS directory :
|
||||
* sequence identifiers column (type *OBIStr_t*)
|
||||
* sequence identifiers column (type ``OBIStr_t``)
|
||||
|
||||
|
||||
File name
|
||||
@ -83,8 +99,7 @@ Example : ``count@3.odc``
|
||||
Modifications
|
||||
-------------
|
||||
|
||||
An OBIDMS column file can only be modified by the process that created it, if its status is set to Open. Those informations are
|
||||
contained in the `header <#header>`_.
|
||||
An OBIDMS column file can only be modified by the process that created it, and while its status is set to Open.
|
||||
|
||||
When a process wants to modify an OBIDMS column file that is closed, it must first clone it. Cloning creates a new version of the
|
||||
file that belongs to the process, i.e., only that process can modify that file, as long as its status is set to Open. Once the process
|
||||
@ -94,6 +109,8 @@ again.
|
||||
That means that one column is stored in one file (if there is only one version)
|
||||
or more (if there are several versions), and that there is one file per version.
|
||||
|
||||
All the versions of one column are stored in one directory.
|
||||
|
||||
|
||||
Versioning
|
||||
----------
|
||||
@ -101,22 +118,22 @@ Versioning
|
||||
The first version of a column file is numbered 0, and each new version increments that
|
||||
number by 1.
|
||||
|
||||
The number of the latest version of an OBIDMS column is stored in an `OBIDMS release file <formats.html#obidms-release-files>`_.
|
||||
The number of the latest version of an OBIDMS column is stored in the `OBIDMS version file <#obidms-version-files>`_ of its directory.
|
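
As a minimal illustration of this naming and versioning scheme (hypothetical helper, not part of
the OBITools3 sources; it only assumes the ``<name>@<version>.odc`` file naming described above)::

    from pathlib import Path

    def latest_version(column_dir):
        """Return the highest version number found in a column directory such as count.obicol."""
        versions = [int(f.stem.split("@", 1)[1]) for f in Path(column_dir).glob("*@*.odc")]
        return max(versions) if versions else -1

Cloning a closed column would then produce ``<name>@<latest_version(...) + 1>.odc``, and the
``.odv`` version file would be updated with that new number.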
||||
|
||||
|
||||
OBIDMS release files
|
||||
OBIDMS version files
|
||||
====================
|
||||
|
||||
Each OBIDMS column is associated with an OBIDMS release file that contains the number of the latest
|
||||
Each OBIDMS column is associated with an OBIDMS version file in its directory, that contains the number of the latest
|
||||
version of the column.
|
||||
|
||||
File name
|
||||
---------
|
||||
|
||||
OBIDMS release files are named with the attribute associated to the data contained in the column, and
|
||||
have the extension ``.odr``.
|
||||
OBIDMS version files are named with the attribute associated to the data contained in the column, and
|
||||
have the extension ``.odv``.
|
||||
|
||||
Example : ``count.odr``
|
||||
Example : ``count.odv``
|
||||
|
||||
|
||||
OBIDMS views
|
||||
@ -140,19 +157,4 @@ operations ever done in the OBIDMS directory and the views in between them :
|
||||
:width: 150 px
|
||||
:align: center
|
||||
|
||||
OBIType header file
|
||||
========================
|
||||
|
||||
.. doxygenfile:: obitypes.h
|
||||
|
||||
|
||||
OBIIntColumn header file
|
||||
========================
|
||||
|
||||
.. doxygenfile:: obiintcolumn.h
|
||||
|
||||
|
||||
OBIColumn header file
|
||||
=====================
|
||||
|
||||
.. doxygenfile:: obicolumn.h
|
||||
|
Binary file not shown.
Before Width: | Height: | Size: 63 KiB After Width: | Height: | Size: 67 KiB |
File diff suppressed because it is too large
@ -2,28 +2,28 @@
|
||||
Container types
|
||||
===============
|
||||
|
||||
Containers allow to manage collection of values of homogeneous type.
|
||||
Containers allow to manage collections of values of homogeneous type.
|
||||
Three container types exist.
|
||||
|
||||
A container is a non-mutable structure once it has been locked.
|
||||
Consequently just insert procedure are needed
|
||||
Consequently, only insertion procedures are needed.
|
||||
|
||||
Lists
|
||||
-----
|
||||
|
||||
Correspond to an ordered collection of values belonging an elementary type.
|
||||
Correspond to an ordered collection of values belonging to an elementary type.
|
||||
|
||||
At its creation
|
||||
At its creation, ...
|
||||
|
||||
|
||||
Sets
|
||||
----
|
||||
|
||||
Correspond to an unordered collection of values belonging an elementary type.
|
||||
Correspond to an unordered collection of values belonging to an elementary type.
|
||||
|
||||
|
||||
Dictionaries
|
||||
------------
|
||||
|
||||
Dictionaries allow to associate a `key` to a `value`. Values can be retrieved through its associated key.
|
||||
Values must belong an elementary type and keys must be *OBIStr_t*.
|
||||
Dictionaries allow to associate a `key` to a `value`. Values can be retrieved through their associated key.
|
||||
Values must belong to an elementary type and keys must be *OBIStr_t*.
|
||||
|
@ -2,8 +2,8 @@
|
||||
Data in OBITools3
|
||||
#################
|
||||
|
||||
The OBITools3 inaugure a new way to manage DNA metabarcoding data.
|
||||
They rely on a `Data management System` (DMS) that can be considered as
|
||||
The OBITools3 introduce a new way to manage DNA metabarcoding data.
|
||||
They rely on a `Data management System` (DMS) that can be viewed as
|
||||
a simplified database system.
|
||||
|
||||
|
||||
|
@ -12,7 +12,7 @@ Atomic types
|
||||
========= ========= ============ ==============================
|
||||
integer int32_t OBIInt_t a signed integer value
|
||||
float double OBIFloat_t a floating value
|
||||
boolean ? OBIBool_t a boolean true/false value
|
||||
boolean bool OBIBool_t a boolean true/false value
|
||||
char char OBIChar_t a character
|
||||
index size_t OBIIdx_t an index in a data structure
|
||||
========= ========= ============ ==============================
|
||||
|
@ -63,8 +63,14 @@ Issue tracking
|
||||
==============
|
||||
|
||||
Issue tracking is done using `GitLab <https://about.gitlab.com/>`_ at http://git.metabarcoding.org/.
|
||||
Creating a branch should always lead to the creation of a label that refers to it in GitLab.
|
||||
Tickets should always be labelled with the branches for which they are relevant.
|
||||
Tickets should always be labeled with the branches for which they are relevant.
|
||||
|
||||
|
||||
*************
|
||||
Documentation
|
||||
*************
|
||||
|
||||
C functions are documented in the header files for public functions, and in the source file for private functions.
|
||||
|
||||
|
||||
**************
|
||||
@ -86,7 +92,7 @@ C99 :
|
||||
* Object layer
|
||||
* OBITools3 library
|
||||
|
||||
`Python 3 <https://www.python.org/>`_ :
|
||||
`Python 3.5 <https://www.python.org/>`_ :
|
||||
* Top layer code (scripts)
|
||||
|
||||
For the documentation, `Sphinx <http://sphinx-doc.org/>`_ should be used for both the original
|
||||
@ -99,8 +105,22 @@ in the Sphinx documentation using `Breathe <https://breathe.readthedocs.org/en/l
|
||||
Naming conventions
|
||||
******************
|
||||
|
||||
.. todo::
|
||||
Look for common naming conventions
|
||||
Struct, Enum: ``Title_case``
|
||||
|
||||
Enum members, macros, constants: ``ALL_CAPS``
|
||||
|
||||
Functions, local variables: ``lower_case``
|
||||
|
||||
Public functions: ``obi_lower_case``
|
||||
|
||||
Functions that shouldn't be called directly: ``_lower_case`` (``_`` prefix)
|
||||
|
||||
Global variables: ``g_lower_case`` (``g_`` prefix)
|
||||
|
||||
Pointers: ``pointer_ptr`` (``_ptr`` suffix)
|
||||
|
||||
.. note::
|
||||
Underscores are used to delimit 'words'.
|
||||
|
||||
|
||||
*****************
|
||||
|
@ -11,7 +11,6 @@ OBITools3 documentation
|
||||
|
||||
Programming guidelines <guidelines>
|
||||
Data structures <data>
|
||||
Pistes de reflexion <pistes>
|
||||
|
||||
|
||||
Indices and tables
|
||||
|
@ -1,21 +0,0 @@
#########################
Avenues for consideration
#########################


******************************
What we want to be able to do
******************************

* Handle missing values
* Modify a column while it is being written (mmap)
* Append values at the end of the file of a column that is being written (mmap)
*


***************
Miscellaneous
***************

* If the order of a column is changed, it is rewritten (no index).
* Use of semaphores for reading
55
doc/source/specialvalues.rst
Normal file
@ -0,0 +1,55 @@
|
||||
==============
|
||||
Special values
|
||||
==============
|
||||
|
||||
|
||||
NA values
|
||||
=========
|
||||
|
||||
All OBITypes have an associated NA (Not Available) value.
|
||||
NA values are implemented by specifying an explicit NA value for each type,
|
||||
corresponding to the R standards as much as possible:
|
||||
|
||||
* For the type ``OBIInt_t``, the NA value is ``INT_MIN``.
|
||||
|
||||
* For the type ``OBIBool_t``, the NA value is ``2``.
|
||||
|
||||
* For the type ``OBIIdx_t`` and ``OBITaxid_t``, the NA value is ``SIZE_MAX``.
|
||||
|
||||
* For the type ``OBIChar_t``: the NA value is ``\0``.
|
||||
|
||||
* For the type ``OBIFloat_t``::
|
||||
|
||||
typedef union
|
||||
{
|
||||
double value;
|
||||
unsigned int word[2];
|
||||
} ieee_double;
|
||||
|
||||
static double NA_value(void)
|
||||
{
|
||||
volatile ieee_double x;
|
||||
x.word[hw] = 0x7ff00000;
|
||||
x.word[lw] = 1954;
|
||||
return x.value;
|
||||
}
|
||||
|
||||
|
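
A small Python illustration of the NA values listed above (illustrative only, not part of the
OBITools3 code; the word order used by the C union depends on endianness)::

    import math
    import struct

    OBIInt_NA  = -2**31     # INT_MIN for the usual 32-bit int
    OBIBool_NA = 2
    OBIChar_NA = "\0"

    # Equivalent of the NA_value() trick above: a NaN carrying the payload 1954,
    # which distinguishes it from ordinary NaN results (same convention as R).
    OBIFloat_NA = struct.unpack(">d", struct.pack(">II", 0x7FF00000, 1954))[0]
    assert math.isnan(OBIFloat_NA)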
||||
Minimum and maximum values for ``OBIInt_t``
|
||||
===========================================
|
||||
|
||||
* Maximum value : ``INT_MAX``
|
||||
* Minimum value : ``INT_MIN(-1?)``
|
||||
|
||||
|
||||
Infinity values for the type ``OBIFloat_t``
|
||||
===========================================
|
||||
|
||||
* Positive infinity : ``INFINITY`` (should be defined in ``<math.h>``)
|
||||
* Negative infinity : ``-INFINITY``
|
||||
|
||||
|
||||
NaN value for the type ``OBIFloat_t``
|
||||
=====================================
|
||||
|
||||
* NaN (Not a Number) value : ``NAN`` (should be defined in ``<math.h>`` but probably needs to be tested)
|
@ -6,14 +6,10 @@ OBITypes
|
||||
.. image:: ./UML/OBITypes_UML.png
|
||||
:download:`html version of the OBITypes UML file <UML/OBITypes_UML.class.violet.html>`
|
||||
|
||||
.. note::
|
||||
All OBITypes have an associated NA (Not Available) value.
|
||||
We have currently two ideas for implementing NA values:
|
||||
|
||||
- By specifying an explicit NA value for each type
|
||||
- By adding to each column of an OBIDMS a bit vector
|
||||
indicating if the value is defined or not.
|
||||
|
||||
.. image:: ./UML/Obicolumn_classes_UML.png
|
||||
|
||||
:download:`html version of the OBIDMS classes UML file <UML/Obicolumn_classes_UML.class.violet.html>`
|
||||
|
||||
|
||||
.. toctree::
|
||||
@ -21,5 +17,4 @@ OBITypes
|
||||
|
||||
The elementary types <elementary>
|
||||
The containers <containers>
|
||||
|
||||
|
||||
Special values <specialvalues>
|
||||
|
1
doc/sphinx/build_dir.txt
Normal file
@ -0,0 +1 @@
|
||||
build/lib.macosx-10.6-intel-3.5
|
BIN
python/obitools3/__init__.pyc
Normal file
Binary file not shown.
16
python/obitools3/obidms/_obidms.cfiles
Normal file
@ -0,0 +1,16 @@
|
||||
../../../src/obidms.h
|
||||
../../../src/obidms.c
|
||||
../../../src/obidmscolumn.h
|
||||
../../../src/obidmscolumn.c
|
||||
../../../src/obidmscolumndir.h
|
||||
../../../src/obidmscolumndir.c
|
||||
../../../src/obierrno.h
|
||||
../../../src/obierrno.c
|
||||
../../../src/obilittlebigman.h
|
||||
../../../src/obilittlebigman.c
|
||||
../../../src/obitypes.h
|
||||
../../../src/obitypes.c
|
||||
../../../src/private_at_functions.h
|
||||
../../../src/private_at_functions.c
|
||||
../../../src/obiarray.h
|
||||
../../../src/obiarray.c
|
47
python/obitools3/obidms/_obidms.pxd
Normal file
@ -0,0 +1,47 @@
|
||||
#cython: language_level=3
|
||||
|
||||
from .capi.obidms cimport OBIDMS_p
|
||||
from .capi.obidmscolumn cimport OBIDMS_column_p
|
||||
from .capi.obitypes cimport obiversion_t, OBIType_t, index_t
|
||||
|
||||
|
||||
cdef class OBIDMS_column
|
||||
|
||||
|
||||
cdef class OBIDMS:
|
||||
|
||||
cdef OBIDMS_p pointer
|
||||
cdef str dms_name
|
||||
|
||||
cpdef dict list(self)
|
||||
cpdef close(self)
|
||||
cpdef OBIDMS_column open_column(self,
|
||||
str column_name,
|
||||
bint create=*,
|
||||
bint clone=*, bint clone_data=*,
|
||||
obiversion_t version_number=*,
|
||||
OBIType_t data_type=*,
|
||||
index_t nb_lines=*,
|
||||
index_t nb_elements_per_line=*,
|
||||
list elements_names=*,
|
||||
str array_name=*)
|
||||
|
||||
|
||||
cdef class OBIDMS_column:
|
||||
|
||||
cdef OBIDMS_column_p pointer
|
||||
cdef OBIDMS dms
|
||||
cdef str data_type # TODO keep as OBIType_t? both?
|
||||
cdef str dms_name
|
||||
cdef str column_name
|
||||
cdef index_t nb_elements_per_line
|
||||
cdef list elements_names
|
||||
|
||||
# cpdef object get_item(self, index_t line_nb, str element_name) TODO can't declare because not the same in all subclasses
|
||||
# cpdef set_item(self, index_t line_nb, str element_name, object value) TODO can't declare because object value
|
||||
cpdef list get_elements_names(self)
|
||||
cpdef str get_data_type(self)
|
||||
cpdef index_t get_nb_lines_used(self)
|
||||
cpdef str get_creation_date(self)
|
||||
cpdef close(self)
|
||||
|
326
python/obitools3/obidms/_obidms.pyx
Normal file
@ -0,0 +1,326 @@
|
||||
#cython: language_level=3
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from obitools3.utils cimport bytes2str, str2bytes
|
||||
|
||||
from .capi.obidms cimport obi_dms, \
|
||||
obi_close_dms
|
||||
from .capi.obidmscolumn cimport obi_column_get_header_from_name, \
|
||||
obi_unmap_header, \
|
||||
obi_column_get_latest_version_from_name, \
|
||||
obi_create_column, \
|
||||
obi_clone_column, \
|
||||
obi_open_column, \
|
||||
obi_close_column, \
|
||||
obi_column_format_date, \
|
||||
OBIDMS_column_header_p
|
||||
from .capi.obitypes cimport const_char_p, \
|
||||
name_data_type
|
||||
|
||||
|
||||
from ._obidms cimport OBIDMS
|
||||
from ._obidms cimport OBIDMS_column
|
||||
|
||||
from ._obidmscolumn_int cimport OBIDMS_column_int, \
|
||||
OBIDMS_column_int_writable, \
|
||||
OBIDMS_column_int_multi_elts, \
|
||||
OBIDMS_column_int_multi_elts_writable
|
||||
|
||||
from ._obidmscolumn_float cimport OBIDMS_column_float, \
|
||||
OBIDMS_column_float_writable, \
|
||||
OBIDMS_column_float_multi_elts, \
|
||||
OBIDMS_column_float_multi_elts_writable
|
||||
|
||||
from ._obidmscolumn_bool cimport OBIDMS_column_bool, \
|
||||
OBIDMS_column_bool_writable, \
|
||||
OBIDMS_column_bool_multi_elts, \
|
||||
OBIDMS_column_bool_multi_elts_writable
|
||||
|
||||
from ._obidmscolumn_char cimport OBIDMS_column_char, \
|
||||
OBIDMS_column_char_writable, \
|
||||
OBIDMS_column_char_multi_elts, \
|
||||
OBIDMS_column_char_multi_elts_writable
|
||||
|
||||
from ._obidmscolumn_str cimport OBIDMS_column_str, \
|
||||
OBIDMS_column_str_writable, \
|
||||
OBIDMS_column_str_multi_elts, \
|
||||
OBIDMS_column_str_multi_elts_writable
|
||||
|
||||
|
||||
cdef class OBIDMS :
|
||||
|
||||
def __init__(self, str dms_name) :
|
||||
|
||||
# Declarations
|
||||
cdef bytes dms_name_b
|
||||
|
||||
# Format the character string to send to C function
|
||||
dms_name_b = str2bytes(dms_name)
|
||||
|
||||
# Fill structure and create or open the DMS
|
||||
self.dms_name = dms_name
|
||||
self.pointer = obi_dms(<const_char_p> dms_name_b)
|
||||
# TODO: test pointer and raise Exception("Failed opening or creating an OBIDMS")
|
||||
|
||||
|
||||
cpdef close(self) :
|
||||
#TODO close all columns
|
||||
if (obi_close_dms(self.pointer)) < 0 :
|
||||
raise Exception("Problem closing an OBIDMS")
|
||||
|
||||
|
||||
cpdef dict list(self):
|
||||
|
||||
# Declarations
|
||||
cdef object p
|
||||
cdef dict dms = {}
|
||||
cdef str column_name
|
||||
cdef bytes column_name_b
|
||||
cdef str data_type
|
||||
cdef str creation_date
|
||||
cdef obiversion_t latest_version
|
||||
cdef index_t line_count
|
||||
cdef OBIDMS_column_header_p header
|
||||
|
||||
p = Path(self.dms_name+'.obidms')
|
||||
|
||||
print("{:<30} {:<12} {:<25} {:<30} {:<40}".format('-Column name-',
|
||||
'-Data type-',
|
||||
'-Latest version number-',
|
||||
'-Line count of latest version-',
|
||||
'-Creation date of latest version-'))
|
||||
for entry in p.iterdir():
|
||||
if entry.suffix == ".obicol":
|
||||
column_name = entry.stem
|
||||
column_name_b = str2bytes(column_name)
|
||||
dms[column_name] = {}
|
||||
header = obi_column_get_header_from_name(self.pointer, column_name_b)
|
||||
data_type = bytes2str(name_data_type(header.data_type))
|
||||
line_count = header.line_count
|
||||
creation_date = bytes2str(obi_column_format_date(header.creation_date))
|
||||
obi_unmap_header(header) # TODO check if error? but C will already warn and there's nothing to do
|
||||
latest_version = obi_column_get_latest_version_from_name(self.pointer, column_name_b)
|
||||
dms[column_name]['data_type'] = data_type
|
||||
dms[column_name]['latest_version'] = latest_version
|
||||
dms[column_name]['line_count'] = line_count
|
||||
dms[column_name]['creation_date'] = creation_date
|
||||
# TODO : actually get all the informations in the header
|
||||
print("{:<30} {:<12} {:<25} {:<30} {:<40}".format(column_name, data_type, latest_version, line_count, creation_date))
|
||||
|
||||
return dms
|
||||
|
||||
|
||||
cpdef OBIDMS_column open_column(self,
|
||||
str column_name,
|
||||
bint create=False,
|
||||
bint clone=False, bint clone_data=True,
|
||||
obiversion_t version_number=-1,
|
||||
OBIType_t data_type= <OBIType_t> 0,
|
||||
index_t nb_lines=0,
|
||||
index_t nb_elements_per_line=0,
|
||||
list elements_names=None,
|
||||
str array_name="default_obiarray"):
|
||||
|
||||
# Declarations
|
||||
cdef OBIDMS_column column
|
||||
cdef object subclass # TODO object?
|
||||
cdef bytes column_name_b
|
||||
cdef OBIDMS_column_header_p header
|
||||
|
||||
header = NULL
|
||||
|
||||
# Format the character string to send to C function
|
||||
column_name_b = str2bytes(column_name)
|
||||
|
||||
# Get the header of the latest version of the column if
|
||||
# some needed informations are not provided
|
||||
if ((not data_type or not nb_elements_per_line) and not create) :
|
||||
header = obi_column_get_header_from_name(self.pointer, column_name_b)
|
||||
|
||||
# Get the data type if not provided
|
||||
if not data_type :
|
||||
if create :
|
||||
raise Exception("A data type must be specified")
|
||||
else :
|
||||
data_type = header.data_type
|
||||
|
||||
# Get the number of elements per line if not provided and needed
|
||||
if not nb_elements_per_line :
|
||||
if create : # Set to one if not provided (default value)
|
||||
nb_elements_per_line = 1
|
||||
else :
|
||||
nb_elements_per_line = header.nb_elements_per_line
|
||||
if nb_elements_per_line > 1 :
|
||||
elements_names = bytes2str(header.elements_names).split(';')
|
||||
|
||||
if header != NULL :
|
||||
obi_unmap_header(header) # TODO check if error? but C will already warn and there's nothing to do
|
||||
|
||||
# Open the column with the right subclass depending on the data type, the mode
|
||||
# (read-only or writable) and whether there are multiple elements per line or not
|
||||
if data_type == 1 :
|
||||
if (create or clone) :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_int_writable
|
||||
else :
|
||||
subclass = OBIDMS_column_int_multi_elts_writable
|
||||
else :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_int
|
||||
else :
|
||||
subclass = OBIDMS_column_int_multi_elts
|
||||
elif data_type == 2 :
|
||||
if (create or clone) :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_float_writable
|
||||
else :
|
||||
subclass = OBIDMS_column_float_multi_elts_writable
|
||||
else :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_float
|
||||
else :
|
||||
subclass = OBIDMS_column_float_multi_elts
|
||||
elif data_type == 3 :
|
||||
if (create or clone) :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_bool_writable
|
||||
else :
|
||||
subclass = OBIDMS_column_bool_multi_elts_writable
|
||||
else :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_bool
|
||||
else :
|
||||
subclass = OBIDMS_column_bool_multi_elts
|
||||
elif data_type == 4 :
|
||||
if (create or clone) :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_char_writable
|
||||
else :
|
||||
subclass = OBIDMS_column_char_multi_elts_writable
|
||||
else :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_char
|
||||
else :
|
||||
subclass = OBIDMS_column_char_multi_elts
|
||||
elif data_type == 5 :
|
||||
if (create or clone) :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_str_writable
|
||||
else :
|
||||
subclass = OBIDMS_column_str_multi_elts_writable
|
||||
else :
|
||||
if nb_elements_per_line == 1 :
|
||||
subclass = OBIDMS_column_str
|
||||
else :
|
||||
subclass = OBIDMS_column_str_multi_elts
|
||||
else :
|
||||
raise Exception("Problem with the data type")
|
||||
|
||||
column = subclass(self, column_name,
|
||||
create, clone, clone_data,
|
||||
version_number, data_type,
|
||||
nb_lines, nb_elements_per_line,
|
||||
elements_names, array_name)
|
||||
|
||||
return column
|
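
# --- Hypothetical usage sketch (annotation, not part of this commit) ----------
# The import path and the type code are assumptions inferred from the file
# layout and the dispatch above; set_line()/get_line() are implemented in the
# typed subclasses (_obidmscolumn_int, etc.).
#
#     from obitools3.obidms._obidms import OBIDMS
#
#     d = OBIDMS("my_dms")                    # creates or opens "my_dms.obidms"
#     col = d.open_column("count", create=True, data_type=1, nb_lines=10)
#     col[0] = 5                              # __setitem__ -> set_line()
#     print(col[0])                           # __getitem__ -> get_line()
#     d.close()
# -------------------------------------------------------------------------------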
||||
|
||||
|
||||
|
||||
cdef class OBIDMS_column :
|
||||
|
||||
# Should only be initialized through a subclass
|
||||
def __init__(self,
|
||||
OBIDMS dms,
|
||||
str column_name,
|
||||
bint create,
|
||||
bint clone, bint clone_data,
|
||||
obiversion_t version_number,
|
||||
OBIType_t type,
|
||||
index_t nb_lines,
|
||||
index_t nb_elements_per_line,
|
||||
list elements_names,
|
||||
str array_name):
|
||||
|
||||
# Declarations
|
||||
cdef bytes column_name_b
|
||||
cdef bytes dms_name_b
|
||||
cdef bytes array_name_b
|
||||
cdef bytes elements_names_b
|
||||
|
||||
# Fill structure
|
||||
self.dms = dms
|
||||
self.data_type = bytes2str(name_data_type(type))
|
||||
self.column_name = column_name
|
||||
self.nb_elements_per_line = nb_elements_per_line
|
||||
self.elements_names = elements_names
|
||||
|
||||
# Format the character strings to send them to C functions
|
||||
column_name_b = str2bytes(column_name)
|
||||
dms_name_b = str2bytes(self.dms.dms_name)
|
||||
array_name_b = str2bytes(array_name)
|
||||
|
||||
# Create, clone or open column
|
||||
if create :
|
||||
if elements_names == None :
|
||||
elements_names_b = column_name_b
|
||||
else :
|
||||
elements_names_b = str2bytes(";".join(elements_names))
|
||||
self.pointer = obi_create_column(self.dms.pointer, column_name_b, type,
|
||||
nb_lines, nb_elements_per_line,
|
||||
elements_names_b, array_name_b)
|
||||
else :
|
||||
if clone :
|
||||
self.pointer = obi_clone_column(self.dms.pointer, column_name_b, version_number, clone_data)
|
||||
else :
|
||||
self.pointer = obi_open_column(self.dms.pointer, column_name_b, version_number)
|
||||
|
||||
|
||||
def __iter__(self):
|
||||
|
||||
# Declarations
|
||||
cdef index_t lines_used
|
||||
cdef index_t line_nb
|
||||
|
||||
# Yield each line
|
||||
lines_used = self.pointer.header.lines_used
|
||||
for line_nb in range(lines_used):
|
||||
yield self.get_line(line_nb)
|
||||
|
||||
|
||||
def __setitem__(self, index_t line_nb, object value):
|
||||
self.set_line(line_nb, value)
|
||||
|
||||
|
||||
def __getitem__(self, index_t line_nb):
|
||||
return self.get_line(line_nb)
|
||||
|
||||
|
||||
# cpdef object get_item(self, index_t line_nb, str element_name): TODO
|
||||
# raise NotImplementedError
|
||||
|
||||
|
||||
# cpdef set_item(self, index_t line_nb, str element_name, object value): TODO
|
||||
# raise NotImplementedError
|
||||
|
||||
|
||||
cpdef list get_elements_names(self):
|
||||
return self.elements_names
|
||||
|
||||
|
||||
cpdef str get_data_type(self):
|
||||
return self.data_type
|
||||
|
||||
|
||||
cpdef index_t get_nb_lines_used(self):
|
||||
return self.pointer.header.lines_used
|
||||
|
||||
|
||||
cpdef str get_creation_date(self):
|
||||
return bytes2str(obi_column_format_date(self.pointer.header.creation_date))
|
||||
|
||||
|
||||
cpdef close(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff.