re-arrange modules, preparing for python-package-index
doc/Makefile (new file, 177 lines)
@@ -0,0 +1,177 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html        to make standalone HTML files"
	@echo "  dirhtml     to make HTML files named index.html in directories"
	@echo "  singlehtml  to make a single large HTML file"
	@echo "  pickle      to make pickle files"
	@echo "  json        to make JSON files"
	@echo "  htmlhelp    to make HTML files and a HTML help project"
	@echo "  qthelp      to make HTML files and a qthelp project"
	@echo "  devhelp     to make HTML files and a Devhelp project"
	@echo "  epub        to make an epub"
	@echo "  latex       to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf    to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja  to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text        to make text files"
	@echo "  man         to make manual pages"
	@echo "  texinfo     to make Texinfo files"
	@echo "  info        to make Texinfo files and run them through makeinfo"
	@echo "  gettext     to make PO message catalogs"
	@echo "  changes     to make an overview of all changed/added/deprecated items"
	@echo "  xml         to make Docutils-native XML files"
	@echo "  pseudoxml   to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck   to check all external links for integrity"
	@echo "  doctest     to run all doctests embedded in the documentation (if enabled)"

clean:
	rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/BAM-BlenderAssetManager.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/BAM-BlenderAssetManager.qhc"

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/BAM-BlenderAssetManager"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/BAM-BlenderAssetManager"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
doc/exts/sphinxarg/__init__.py (new file, empty)
doc/exts/sphinxarg/ext.py (new file, 355 lines)
@@ -0,0 +1,355 @@
from argparse import ArgumentParser
import os

from docutils import nodes
from docutils.parsers.rst.directives import flag, unchanged
from sphinx.util.compat import Directive
from sphinx.util.nodes import nested_parse_with_titles

from sphinxarg.parser import parse_parser, parser_navigate


def map_nested_definitions(nested_content):
    if nested_content is None:
        raise Exception('Nested content should be iterable, not null')
    # build definition dictionary
    definitions = {}
    for item in nested_content:
        if not isinstance(item, nodes.definition_list):
            continue
        for subitem in item:
            if not isinstance(subitem, nodes.definition_list_item):
                continue
            if not len(subitem.children) > 0:
                continue
            classifier = '@after'
            idx = subitem.first_child_matching_class(nodes.classifier)
            if idx is not None:
                ci = subitem[idx]
                if len(ci.children) > 0:
                    classifier = ci.children[0].astext()
            if classifier is not None and classifier not in (
                    '@replace', '@before', '@after'):
                raise Exception('Unknown classifier: %s' % classifier)
            idx = subitem.first_child_matching_class(nodes.term)
            if idx is not None:
                ch = subitem[idx]
                if len(ch.children) > 0:
                    term = ch.children[0].astext()
                    idx = subitem.first_child_matching_class(nodes.definition)
                    if idx is not None:
                        def_node = subitem[idx]
                        def_node.attributes['classifier'] = classifier
                        definitions[term] = def_node
    return definitions


def print_arg_list(data, nested_content):
    definitions = map_nested_definitions(nested_content)
    items = []
    if 'args' in data:
        for arg in data['args']:
            my_def = [nodes.paragraph(text=arg['help'])] if arg['help'] else []
            name = arg['name']
            my_def = apply_definition(definitions, my_def, name)
            if len(my_def) == 0:
                my_def.append(nodes.paragraph(text='Undocumented'))
            if 'choices' in arg:
                my_def.append(nodes.paragraph(
                    text=('Possible choices: %s' % ', '.join([str(c) for c in arg['choices']]))))
            items.append(
                nodes.option_list_item(
                    '', nodes.option_group('', nodes.option_string(text=name)),
                    nodes.description('', *my_def)))
    return nodes.option_list('', *items) if items else None


def print_opt_list(data, nested_content):
    definitions = map_nested_definitions(nested_content)
    items = []
    if 'options' in data:
        for opt in data['options']:
            names = []
            my_def = [nodes.paragraph(text=opt['help'])] if opt['help'] else []
            for name in opt['name']:
                option_declaration = [nodes.option_string(text=name)]
                if opt['default'] is not None \
                        and opt['default'] != '==SUPPRESS==':
                    option_declaration += nodes.option_argument(
                        '', text='=' + str(opt['default']))
                names.append(nodes.option('', *option_declaration))
                my_def = apply_definition(definitions, my_def, name)
            if len(my_def) == 0:
                my_def.append(nodes.paragraph(text='Undocumented'))
            if 'choices' in opt:
                my_def.append(nodes.paragraph(
                    text=('Possible choices: %s' % ', '.join([str(c) for c in opt['choices']]))))
            items.append(
                nodes.option_list_item(
                    '', nodes.option_group('', *names),
                    nodes.description('', *my_def)))
    return nodes.option_list('', *items) if items else None


def print_command_args_and_opts(arg_list, opt_list, sub_list=None):
    items = []
    if arg_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Positional arguments:'),
            nodes.definition('', arg_list)))
    if opt_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Options:'),
            nodes.definition('', opt_list)))
    if sub_list and len(sub_list):
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Sub-commands:'),
            nodes.definition('', sub_list)))
    return nodes.definition_list('', *items)


def apply_definition(definitions, my_def, name):
    if name in definitions:
        definition = definitions[name]
        classifier = definition['classifier']
        if classifier == '@replace':
            return definition.children
        if classifier == '@after':
            return my_def + definition.children
        if classifier == '@before':
            return definition.children + my_def
        raise Exception('Unknown classifier: %s' % classifier)
    return my_def


def print_subcommand_list(data, nested_content):
    definitions = map_nested_definitions(nested_content)
    items = []
    if 'children' in data:
        for child in data['children']:
            my_def = [nodes.paragraph(
                text=child['help'])] if child['help'] else []
            name = child['name']
            my_def = apply_definition(definitions, my_def, name)
            if len(my_def) == 0:
                my_def.append(nodes.paragraph(text='Undocumented'))
            my_def.append(nodes.literal_block(text=child['usage']))
            my_def.append(print_command_args_and_opts(
                print_arg_list(child, nested_content),
                print_opt_list(child, nested_content)
            ))
            items.append(
                nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.strong(text=name)),
                    nodes.definition('', *my_def)
                )
            )
    return nodes.definition_list('', *items)


class ArgParseDirective(Directive):
    has_content = True
    option_spec = dict(module=unchanged, func=unchanged, ref=unchanged,
                       prog=unchanged, path=unchanged, nodefault=flag,
                       manpage=unchanged, nosubcommands=unchanged)

    def _construct_manpage_specific_structure(self, parser_info):
        """
        Construct a typical man page consisting of the following elements:
            NAME (automatically generated, out of our control)
            SYNOPSIS
            DESCRIPTION
            OPTIONS
            FILES
            SEE ALSO
            BUGS
        """
        # SYNOPSIS section
        synopsis_section = nodes.section(
            '',
            nodes.title(text='Synopsis'),
            nodes.literal_block(text=parser_info["bare_usage"]),
            ids=['synopsis-section'])
        # DESCRIPTION section
        description_section = nodes.section(
            '',
            nodes.title(text='Description'),
            nodes.paragraph(text=parser_info.get(
                'description', parser_info.get(
                    'help', "undocumented").capitalize())),
            ids=['description-section'])
        nested_parse_with_titles(
            self.state, self.content, description_section)
        if parser_info.get('epilog'):
            # TODO: do whatever sphinx does to understand ReST inside
            # docstrings magically imported from other places. The nested
            # parse method invoked above seem to be able to do this but
            # I haven't found a way to do it for arbitrary text
            description_section += nodes.paragraph(
                text=parser_info['epilog'])
        # OPTIONS section
        options_section = nodes.section(
            '',
            nodes.title(text='Options'),
            ids=['options-section'])
        if 'args' in parser_info:
            options_section += nodes.paragraph()
            options_section += nodes.subtitle(text='Positional arguments:')
            options_section += self._format_positional_arguments(parser_info)
        if 'options' in parser_info:
            options_section += nodes.paragraph()
            options_section += nodes.subtitle(text='Optional arguments:')
            options_section += self._format_optional_arguments(parser_info)
        items = [
            # NOTE: we cannot generate NAME ourselves. It is generated by
            # docutils.writers.manpage
            synopsis_section,
            description_section,
            # TODO: files
            # TODO: see also
            # TODO: bugs
        ]
        if len(options_section.children) > 1:
            items.append(options_section)
        if 'nosubcommands' not in self.options:
            # SUBCOMMANDS section (non-standard)
            subcommands_section = nodes.section(
                '',
                nodes.title(text='Sub-Commands'),
                ids=['subcommands-section'])
            if 'children' in parser_info:
                subcommands_section += self._format_subcommands(parser_info)
            if len(subcommands_section) > 1:
                items.append(subcommands_section)
        if os.getenv("INCLUDE_DEBUG_SECTION"):
            import json
            # DEBUG section (non-standard)
            debug_section = nodes.section(
                '',
                nodes.title(text="Argparse + Sphinx Debugging"),
                nodes.literal_block(text=json.dumps(parser_info, indent=' ')),
                ids=['debug-section'])
            items.append(debug_section)
        return items

    def _format_positional_arguments(self, parser_info):
        assert 'args' in parser_info
        items = []
        for arg in parser_info['args']:
            arg_items = []
            if arg['help']:
                arg_items.append(nodes.paragraph(text=arg['help']))
            else:
                arg_items.append(nodes.paragraph(text='Undocumented'))
            if 'choices' in arg:
                arg_items.append(
                    nodes.paragraph(
                        text='Possible choices: ' + ', '.join(arg['choices'])))
            items.append(
                nodes.option_list_item(
                    '', nodes.option_group(
                        '', nodes.description(text=arg['metavar'])),
                    nodes.description('', *arg_items)))
        return nodes.option_list('', *items)

    def _format_optional_arguments(self, parser_info):
        assert 'options' in parser_info
        items = []
        for opt in parser_info['options']:
            names = []
            opt_items = []
            for name in opt['name']:
                option_declaration = [nodes.option_string(text=name)]
                if opt['default'] is not None \
                        and opt['default'] != '==SUPPRESS==':
                    option_declaration += nodes.option_argument(
                        '', text='=' + str(opt['default']))
                names.append(nodes.option('', *option_declaration))
            if opt['help']:
                opt_items.append(nodes.paragraph(text=opt['help']))
            else:
                opt_items.append(nodes.paragraph(text='Undocumented'))
            if 'choices' in opt:
                opt_items.append(
                    nodes.paragraph(
                        text='Possible choices: ' + ', '.join(opt['choices'])))
            items.append(
                nodes.option_list_item(
                    '', nodes.option_group('', *names),
                    nodes.description('', *opt_items)))
        return nodes.option_list('', *items)

    def _format_subcommands(self, parser_info):
        assert 'children' in parser_info
        items = []
        for subcmd in parser_info['children']:
            subcmd_items = []
            if subcmd['help']:
                subcmd_items.append(nodes.paragraph(text=subcmd['help']))
            else:
                subcmd_items.append(nodes.paragraph(text='Undocumented'))
            items.append(
                nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.strong(
                        text=subcmd['bare_usage'])),
                    nodes.definition('', *subcmd_items)))
        return nodes.definition_list('', *items)

    def run(self):
        if 'module' in self.options and 'func' in self.options:
            module_name = self.options['module']
            attr_name = self.options['func']
        elif 'ref' in self.options:
            _parts = self.options['ref'].split('.')
            module_name = '.'.join(_parts[0:-1])
            attr_name = _parts[-1]
        else:
            raise self.error(
                ':module: and :func: should be specified, or :ref:')
        mod = __import__(module_name, globals(), locals(), [attr_name])
        if not hasattr(mod, attr_name):
            raise self.error((
                'Module "%s" has no attribute "%s"\n'
                'Incorrect argparse :module: or :func: values?'
            ) % (module_name, attr_name))
        func = getattr(mod, attr_name)
        if isinstance(func, ArgumentParser):
            parser = func
        else:
            parser = func()
        if 'path' not in self.options:
            self.options['path'] = ''
        path = str(self.options['path'])
        if 'prog' in self.options:
            parser.prog = self.options['prog']
        result = parse_parser(
            parser, skip_default_values='nodefault' in self.options)
        result = parser_navigate(result, path)
        if 'manpage' in self.options:
            return self._construct_manpage_specific_structure(result)
        nested_content = nodes.paragraph()
        self.state.nested_parse(
            self.content, self.content_offset, nested_content)
        nested_content = nested_content.children
        items = []
        # add common content between
        for item in nested_content:
            if not isinstance(item, nodes.definition_list):
                items.append(item)
        if 'description' in result:
            items.append(nodes.paragraph(text=result['description']))
        items.append(nodes.literal_block(text=result['usage']))
        items.append(print_command_args_and_opts(
            print_arg_list(result, nested_content),
            print_opt_list(result, nested_content),
            print_subcommand_list(result, nested_content)
        ))
        if 'epilog' in result:
            items.append(nodes.paragraph(text=result['epilog']))
        return items


def setup(app):
    app.add_directive('argparse', ArgParseDirective)
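The directive's ``:module:``/``:func:`` options only need to name an importable callable that returns an ``argparse.ArgumentParser`` (or the parser object itself), as ``ArgParseDirective.run()`` above shows. A minimal sketch of such a target, using hypothetical names ``demo_cli``/``build_parser`` purely for illustration::

    from argparse import ArgumentParser

    def build_parser():
        # Build and return the parser; the directive calls this function
        # and then walks the result with parse_parser().
        parser = ArgumentParser(prog='demo')
        parser.add_argument('path', help='file to process')
        parser.add_argument('--verbose', action='store_true', help='print more output')
        return parser

    # Referenced from an .rst file as:
    #   .. argparse::
    #      :module: demo_cli
    #      :func: build_parser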
doc/exts/sphinxarg/parser.py (new file, 125 lines)
@@ -0,0 +1,125 @@
from argparse import _HelpAction, _SubParsersAction
import re


class NavigationException(Exception):
    pass


def parser_navigate(parser_result, path, current_path=None):
    if isinstance(path, str):
        if path == '':
            return parser_result
        path = re.split(r'\s+', path)
    current_path = current_path or []
    if len(path) == 0:
        return parser_result
    if 'children' not in parser_result:
        raise NavigationException(
            'Current parser has no child elements. (path: %s)' %
            ' '.join(current_path))
    next_hop = path.pop(0)
    for child in parser_result['children']:
        if child['name'] == next_hop:
            current_path.append(next_hop)
            return parser_navigate(child, path, current_path)
    raise NavigationException(
        'Current parser has no child element with name: %s (path: %s)' % (
            next_hop, ' '.join(current_path)))


def _try_add_parser_attribute(data, parser, attribname):
    attribval = getattr(parser, attribname, None)
    if attribval is None:
        return
    if not isinstance(attribval, str):
        return
    if len(attribval) > 0:
        data[attribname] = attribval


def _format_usage_without_prefix(parser):
    """
    Use private argparse APIs to get the usage string without
    the 'usage: ' prefix.
    """
    fmt = parser._get_formatter()
    fmt.add_usage(parser.usage, parser._actions,
                  parser._mutually_exclusive_groups, prefix='')
    return fmt.format_help().strip()


def parse_parser(parser, data=None, **kwargs):
    if data is None:
        data = {
            'name': '',
            'usage': parser.format_usage().strip(),
            'bare_usage': _format_usage_without_prefix(parser),
            'prog': parser.prog,
        }
    _try_add_parser_attribute(data, parser, 'description')
    _try_add_parser_attribute(data, parser, 'epilog')
    for action in parser._get_positional_actions():
        if isinstance(action, _HelpAction):
            continue
        if isinstance(action, _SubParsersAction):
            helps = {}
            for item in action._choices_actions:
                helps[item.dest] = item.help

            # commands which share an existing parser are an alias,
            # don't duplicate docs
            subsection_alias = {}
            subsection_alias_names = set()
            for name, subaction in action._name_parser_map.items():
                if subaction not in subsection_alias:
                    subsection_alias[subaction] = []
                else:
                    subsection_alias[subaction].append(name)
                    subsection_alias_names.add(name)

            for name, subaction in action._name_parser_map.items():
                if name in subsection_alias_names:
                    continue
                subalias = subsection_alias[subaction]
                subaction.prog = '%s %s' % (parser.prog, name)
                subdata = {
                    'name': name if not subalias else
                            '%s (%s)' % (name, ', '.join(subalias)),
                    'help': helps[name] if name in helps else '',
                    'usage': subaction.format_usage().strip(),
                    'bare_usage': _format_usage_without_prefix(subaction),
                }
                parse_parser(subaction, subdata, **kwargs)
                if 'children' not in data:
                    data['children'] = []
                data['children'].append(subdata)
            continue
        if 'args' not in data:
            data['args'] = []
        arg = {
            'name': action.dest,
            'help': action.help or '',
            'metavar': action.metavar
        }
        if action.choices:
            arg['choices'] = action.choices
        data['args'].append(arg)
    show_defaults = (
        ('skip_default_values' not in kwargs)
        or (kwargs['skip_default_values'] is False))
    for action in parser._get_optional_actions():
        if isinstance(action, _HelpAction):
            continue
        if 'options' not in data:
            data['options'] = []
        option = {
            'name': action.option_strings,
            'default': action.default if show_defaults else '==SUPPRESS==',
            'help': action.help or ''
        }
        if action.choices:
            option['choices'] = action.choices
        if "==SUPPRESS==" not in option['help']:
            data['options'].append(option)
    return data
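To make the resulting data structure concrete, here is a minimal sketch (a hypothetical parser, not part of this commit, assuming ``doc/exts`` is on ``sys.path``) of what ``parse_parser()`` produces for a parser with one positional, one option and one sub-command::

    from argparse import ArgumentParser
    from sphinxarg.parser import parse_parser

    parser = ArgumentParser(prog='demo')
    parser.add_argument('path', help='file to process')
    parser.add_argument('--force', action='store_true', help='overwrite existing output')
    sub = parser.add_subparsers()
    sub.add_parser('status', help='show session status')

    data = parse_parser(parser)
    # Top-level keys: 'name', 'usage', 'bare_usage', 'prog',
    # plus the 'args', 'options' and 'children' lists filled in above;
    # each entry in 'children' nests the same structure recursively.
    print(sorted(data))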
doc/make.bat (new file, 242 lines)
@@ -0,0 +1,242 @@
@ECHO OFF

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
set I18NSPHINXOPTS=%SPHINXOPTS% source
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html        to make standalone HTML files
	echo.  dirhtml     to make HTML files named index.html in directories
	echo.  singlehtml  to make a single large HTML file
	echo.  pickle      to make pickle files
	echo.  json        to make JSON files
	echo.  htmlhelp    to make HTML files and a HTML help project
	echo.  qthelp      to make HTML files and a qthelp project
	echo.  devhelp     to make HTML files and a Devhelp project
	echo.  epub        to make an epub
	echo.  latex       to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text        to make text files
	echo.  man         to make manual pages
	echo.  texinfo     to make Texinfo files
	echo.  gettext     to make PO message catalogs
	echo.  changes     to make an overview of all changed/added/deprecated items
	echo.  xml         to make Docutils-native XML files
	echo.  pseudoxml   to make pseudoxml-XML files for display purposes
	echo.  linkcheck   to check all external links for integrity
	echo.  doctest     to run all doctests embedded in the documentation if enabled
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)


%SPHINXBUILD% 2> nul
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\BAM-BlenderAssetManager.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\BAM-BlenderAssetManager.qhc
	goto end
)

if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)

if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)

if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)

if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)

if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
)

if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
)

:end
doc/source/conf.py (new file, 276 lines)
@@ -0,0 +1,276 @@
# -*- coding: utf-8 -*-
#
# BAM - Blender Asset Manager documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 16 16:24:43 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
]

if 1:
    extensions += ['sphinxarg.ext']
    sys.path.extend([
        # to import 'bam.py'
        os.path.join(os.path.dirname(__file__), "..", ".."),
        # to access the 'sphinxarg' extension
        os.path.abspath(os.path.join("..", "exts"))
    ])

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'BAM - Blender Asset Manager'
copyright = u'2014, Blender Institute'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1 alpha'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
try:
    import sphinx_rtd_theme
except ImportError:
    sphinx_rtd_theme = None

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
if sphinx_rtd_theme:
    html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom themes here, relative to this directory.
if sphinx_rtd_theme:
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'BAM-BlenderAssetManagerdoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'BAM-BlenderAssetManager.tex', u'BAM - Blender Asset Manager Documentation',
     u'Blender Institute', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'bam-blenderassetmanager', u'BAM - Blender Asset Manager Documentation',
     [u'Blender Institute'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'BAM-BlenderAssetManager', u'BAM - Blender Asset Manager Documentation',
     u'Blender Institute', 'BAM-BlenderAssetManager', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
doc/source/design/milestones.rst (new file, 62 lines)
@@ -0,0 +1,62 @@
############
Milestones
############


Minimum Viable Product
======================

Get existing project migrated to asset manager as soon as possible.

- No Client-side-SVN.
- Checkout/Commit assets.
- Basic UI.
- ZIP file access (as an example, users wouldn't have to notice)
- Testing (basic testing framework)


Performance
===========

- Parallel downloads
- Cache (avoid re-downloading files)


Interface
=========

- BAM project definition.
- Blender Addon (UI for performing basic actions)
- *Maybe* extend GUI client.
- Web UI
- Download Zipfiles


Scaling/Project
===============

- Consistency checks (correct library linking)


Basic Automated Tasks
=====================

- Generated Previews (basic automated tasks hooked up to asset manager)
- Baking (physics)
- Web UI (create/view/manage jobs)


Story Tool
==========

- Establish connection between sequencer and 'shot' assets *(abstract concept)*
- Integrate into Addon
- Connect automated tasks on edit updates, (sync sequence with shot assets)
- Expose the edit outside of Blender *(web based UI, reviews, comment, feedback, tasks...)*


Project Management
==================

- Web UI
- Connect basic communication tools to assets, commits.
doc/source/design/planning.rst (new file, 262 lines)
@@ -0,0 +1,262 @@
##########
Pipeline
##########

**The Story Tool**

*This is the overarching goal of the project.*

- Asset Management *Manage Files & Content*
- Project Management *Manage People & Tasks*
- Automated Tasks *Manage Generated Content*


.. note::

   Design Goals

   ...this is a tool, not a framework,
   for anyone making animation(s) with Blender.

- Handle multiple projects.
- Foresee use of other tools (as well as Blender) in the work-flow.
- Support multi-site/distributed work-flow.
- Support **some** of the functionality in the Blender-Cloud.
- Design to **allow** for swappable modular components, even if we end up sticking with single technologies.


Asset Management
================

- Assets <-> Users
- Revisions
- Variations


Architecture Overview
---------------------

Server
^^^^^^

- File asset
- Tools (blend file packer, evaluate sequencer, visualize deps, automated tasks ...)
- Public API (web service), *communicates with client.*

Client
^^^^^^

- Interface GUI/CLI (Blender/Web-UI also)
- Tools (manage files on the client, cache.)
- Local files (models, images)


User Stories
------------


Layout Artist
^^^^^^^^^^^^^

Mathieu has to update the camera motion in an existing shot in the layout stage.

Since he's in the middle of the project, he loads Blender and accesses the 'Recent Projects' menu,
which shows an interface to open the file he's looking for.
In this case someone modified a prop, giving the message "Shader changed, by Pablo",
which he updates because it only takes a few seconds.

Now he's able to edit the camera and save his work (exit Blender, reopen... etc.).
When he's happy with the changes he opens the file menu **File -> BAM -> Commit Changes**.

This prompts him with a dialog showing a list of the changed files.
He enters a commit message explaining the change and presses **Commit**.

A progress bar appears while the data is being uploaded.

He finally receives confirmation that the commit uploaded correctly.


Animator
^^^^^^^^

Hjalti opens the project management web site and finds he's been assigned a new shot to animate.

He opens Blender and selects **File -> BAM -> Load**;
this prompts him with a browser which he uses to locate the shot,
which he can easily identify by the name: ``shot_012_anim.blend``

He confirms the action, which shows a download progress bar and then immediately loads the file in Blender.

He is presented with a low poly scene containing rigs with the characters in the shot and the props they interact with,
as well as a low resolution version of the environment.

He can work on the animation, modify the file, and commit... *as Mathieu did*.


Editor
^^^^^^

Mathieu is going through his daily review of the edit in Blender.

He opens the edit blend **File -> BAM -> Recent Files -> Edit**.

This shows the sequencer view with each shot as a strip.

He can add a new shot into the edit **Sequencer Header -> Add -> BAM Shot**.

.. note::

   Exactly how this is done is yet to be decided;
   however the shots will be created and managed on the server (likely via a web-ui).

A popup will appear with a list of shots which can be selected to add.

At this point the sequencer can be used as usual,
**however** the clip in the sequencer is now **the** reference for length/timing of the shot,
and its values are propagated to the server (*once committed*).


Implementation Details
----------------------

This document describes the layout for the Blender pipeline.


Overview
^^^^^^^^

- Use SVN for internal storage.
- SVN repository is for internal storage (but keep usable as *last resort*)
- Support extracting a single ``.blend`` file, and committing it back (without a full checkout),
  useful for remote artists.


SVN Commit Abstraction
^^^^^^^^^^^^^^^^^^^^^^

Motivation:

Artists need to be able to work on jobs without downloading the entire repository.


Workflow:

- Select an asset to *checkout* (likely via a web-ui/blender-ui).
- Download the asset and its dependencies (web/cli/blender-ui).
- Modify data locally (images, 3d... text... etc).
- Submit task back with commit log (blender-ui/cli/web?).
  (Server handles commit).


Technical details:

- Server handles SVN integration which is hidden from the client.
- The job submission and editing workflow is handled by client/server:
  the server creates binary blobs for the bundles,
  the client handles download and creates a usable directory for editing.
- Path remapping of ``.blend`` files must be handled
  (in both directions, likely using ``blendfile.py``).
- Use a cache on the client to avoid re-downloading the same resources.


Components
----------

Client
^^^^^^

- UI (list + checkout (remote assets), edit + commit (local assets))

  - CLI (command line tool for low level access, scripts TDs can use... etc)
  - Blender/Integrated UI
  - Web-UI (browse assets, limited access).

- Tools

  - browse remote repo
  - downloader (simple zip)
  - checkout/commit workflow (check what to download, commit what's changed, manage cache internally to avoid re-download)

- Data

  - Files/Assets (blend files, textures)
  - Cache (physics assets which can be regenerated on the server)


Server
^^^^^^

- Write blend file extractor / packager.
- Write online SVN browser.
- ... TODO


Project Management
==================

Use Phabricator! DONE :D


Automated Tasks
===============


Components
----------

There are 2 types of tasks to be automated:

* User submitted tasks.
* Tasks generated by events such as commit hooks, finished rendering... etc.

Automated tasks are broken into 3 steps:

* Creation (API/CLI/GUI)
* Scheduling/queueing (managed by the server)
* Execution/job management (controlled via the server, through APIs & UIs)


User Stories
------------

Here's a list of tasks we would expect the system to support:

- Generating Renderfarm Preview
- Low resolution textures for animation
- High Resolution Simulation (hair, smoke)
- Final Render a Scene
- OpenGL Preview Every Shot
- Bundle a Blend file into a ZIP
- Synchronizing Data (SVN/Database... repositories... backups)
- Consistency checks (automated tasks to validate the state of the project)
- Blend file hygiene/lint (unused datablocks, images not used anywhere)
- Building Blender


Implementation
--------------

We plan to develop a very simple system leveraging existing technologies.


Dashboard
^^^^^^^^^

UI (web based), allows manual creation of tasks.


Server
^^^^^^

Backend connected to database and scheduler, managing & assigning jobs to workers.


Worker
^^^^^^

Simple client, exposes control of the machine via an API.
doc/source/index.rst (new file, 22 lines)
@@ -0,0 +1,22 @@
BAM - Blender Asset Manager's documentation
===========================================

Contents:

.. toctree::
   :maxdepth: 2

   manual/index.rst
   reference/index.rst
   install/index.rst
   design/planning.rst
   design/milestones.rst


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
doc/source/install/index.rst (new file, 29 lines)
@@ -0,0 +1,29 @@
Installation and requirements
#############################

How to get the system up and running.

Webservice
==========

The web-service is a simple web app based on the Flask framework, plus a few extensions.
In order to get it up and running:

- Create a virtual environment
- Activate the virtual environment
- Install dependencies with ``pip3 install -r requirements.txt``
- Run the service

To learn about web-service usage, check out the relevant section (which does not exist yet).


Client
======

In order to use the ``bam`` command in your terminal, you can add this file to any
directory on your PATH and call it ``bam``. Don't forget to give it +x permissions. ::

    #!/bin/sh
    exec python /absolute/path/to/bam/client/cli/bam.py "$@"

As you can see, the wrapper calls your ``bam.py`` file, so make sure that path is correct!
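If a POSIX shell is not available, the same wrapper idea can be sketched as a small Python launcher; as in the shell version above, the path is a placeholder you must adjust to your checkout::

    #!/usr/bin/env python
    import runpy
    import sys

    # Re-run the CLI entry point as if it were invoked directly.
    sys.argv[0] = "/absolute/path/to/bam/client/cli/bam.py"
    runpy.run_path(sys.argv[0], run_name="__main__")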
doc/source/manual/index.rst (new file, 101 lines)
@@ -0,0 +1,101 @@
User manual
###########

Using BAM is easy and fun! Provided that:

- you know how to use the command line of your OS
- you have some experience with how versioning systems work

Actually, this is not true, and in this guide we will explain how to use the BAM client from scratch.

.. hint:: Do not try to follow this page as a step-by-step tutorial, since its content might
   not be completely coherent. The purpose of this manual is simply to explain the bam
   workflow from the artist's point of view.


Project Initialization
======================

In order to start working, we need to initialize a *project folder*. This operation should
be done only once. To create a project folder we need to open our terminal, and go to the
location where we want to store the project folder. Then we can type::

    bam init http://bam:5000/gooseberry

This command creates a ``gooseberry`` folder, containing information about the project. If
we enter the folder and use the ``ls`` command, we notice that it is empty, but if
we use the ``bam ls`` command we see a directory listing. This listing is provided by the
project server on the fly and it allows us to browse its content without having a local copy
on our machine.

The project folder can be moved anywhere, at any time. The exact ``bam init`` syntax is
available in the reference section.


Session creation
================

Once the project has been initialized and we are able to browse it remotely, we can proceed
to check out a file from it. For example we can type::

    bam co libs/envs/jungle/jungle_opening.blend

This creates a ``jungle_opening`` folder inside of our ``gooseberry`` project folder, which
will contain the ``jungle_opening.blend``, along with all its dependencies (library files,
textures, etc.) organized as follows::

    jungle_opening.blend
    relative/maps/path/map.png
    _absolute/maps/path/map.png

As we can see, folders starting with the ``_`` character map to an absolute path on the server,
while the other folders are relative to the file that was used to create the session.


Editing
=======

At this point we can edit any file in the session, and the system will keep track of our changes.
Currently we can:

- add new files to the session
- delete files
- edit files

We can not:

- rename files

In order to check the status of our edits, we can use ``bam st``, which will print a list
of edited, added and deleted files.

.. note:: Sessions are meant to create a contained and controlled working environment. We should
   never, ever refer to content that is outside of a session folder.


Committing changes
==================

Once we are happy with the changes we made to the session, we can send it back to the server, which
will take care of versioning and merging it back into the project.
To commit a change, simply type::

    bam ci -m 'Updated trees'

If you are outside the session folder, you have to specify it::

    bam ci jungle_opening -m 'Updated trees'

After committing, we can keep working in the same session, and do further commits.


Updating an existing session
============================

It is possible to update an existing session by running::

    bam update

Make sure you have committed your files before updating.
doc/source/reference/index.rst (new file, 9 lines)
@@ -0,0 +1,9 @@
Client Reference
################

Here is a reference for all the BAM CLI commands.

.. argparse::
   :module: bam.cli
   :func: create_argparse
   :prog: bam
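The actual ``create_argparse`` function lives in ``bam.cli`` and is not part of this commit; for context, the directive only requires a callable of roughly this shape (a sketch with an illustrative sub-command, not the real implementation)::

    import argparse

    def create_argparse():
        # Top-level parser plus one sub-parser per CLI action;
        # the argparse directive documents each sub-parser in turn.
        parser = argparse.ArgumentParser(prog='bam')
        subparsers = parser.add_subparsers(title='subcommands')
        init = subparsers.add_parser('init', help='initialize a project folder')
        init.add_argument('url', help='project URL, e.g. http://bam:5000/gooseberry')
        return parser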