Simplify dump; first simple snapshot-components passing
This commit is contained in:
parent
ac26d8d610
commit
439f7b9d58
|
@ -0,0 +1,240 @@
|
|||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build: `which` exits non-zero when the
# command is missing, so abort with a helpful message before any target runs.
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " fasthtml 'fast html': to make HTML files without regenerating the API"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " applehelp to make an Apple Help Book"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " epub3 to make an epub3"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
@echo " coverage to run coverage check of the documentation (if enabled)"
|
||||
@echo " dummy to check syntax errors of document sources"
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
rm -rf modules
|
||||
|
||||
.PHONY: html
|
||||
html:
|
||||
sphinx-apidoc -f -l -o modules ../ereuse_devicehub
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
.PHONY: fasthtml
|
||||
fasthtml:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
.PHONY: dirhtml
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
.PHONY: singlehtml
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
.PHONY: pickle
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
.PHONY: json
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
.PHONY: htmlhelp
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
.PHONY: qthelp
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/DeviceHub.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/DeviceHub.qhc"
|
||||
|
||||
.PHONY: applehelp
|
||||
applehelp:
|
||||
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||
@echo
|
||||
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
|
||||
@echo "N.B. You won't be able to view it unless you put it in" \
|
||||
"~/Library/Documentation/Help or install it in your application" \
|
||||
"bundle."
|
||||
|
||||
.PHONY: devhelp
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/DeviceHub"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/DeviceHub"
|
||||
@echo "# devhelp"
|
||||
|
||||
.PHONY: epub
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
.PHONY: epub3
|
||||
epub3:
|
||||
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
|
||||
@echo
|
||||
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
|
||||
|
||||
.PHONY: latex
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
.PHONY: latexpdf
# Regenerate the API docs, build LaTeX sources, then run them through pdflatex.
# NOTE: apidoc output goes to `modules` to match the `html` target and so that
# `clean` (which removes `modules`) can pick it up — previously it polluted `.`.
latexpdf:
	sphinx-apidoc -f -l -o modules ../ereuse_devicehub
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
.PHONY: latexpdfja
# Build LaTeX sources and produce PDFs via platex/dvipdfmx (Japanese toolchain).
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "platex/dvipdfmx finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
.PHONY: text
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
.PHONY: man
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
.PHONY: texinfo
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
.PHONY: info
# Build Texinfo sources, then run makeinfo via the generated sub-Makefile.
# Use $(MAKE) (not bare `make`) so -j/-n and the jobserver propagate.
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	$(MAKE) -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
.PHONY: gettext
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
.PHONY: changes
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
.PHONY: linkcheck
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
.PHONY: doctest
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
.PHONY: coverage
|
||||
coverage:
|
||||
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
|
||||
@echo "Testing of coverage in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/coverage/python.txt."
|
||||
|
||||
.PHONY: xml
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
.PHONY: pseudoxml
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
||||
|
||||
.PHONY: dummy
|
||||
dummy:
|
||||
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
|
||||
@echo
|
||||
@echo "Build finished. Dummy builder generates no files."
|
|
@ -0,0 +1,300 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# DeviceHub documentation build configuration file, created by
|
||||
# sphinx-quickstart on Mon Apr 18 16:40:20 2016.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
sys.path.insert(0, os.path.abspath('../ereuse_devicehub'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
needs_sphinx = '1.4.7'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
# Sphinx extension modules enabled for this project.
# NOTE: 'sphinx.ext.todo' was previously listed twice; keep a single entry.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    'sphinxcontrib.httpdomain',
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
# source_suffix = ['.rst', '.md']
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'DeviceHub'
|
||||
copyright = '2017, eReuse.org team'
|
||||
author = 'eReuse.org team'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '0.1'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '0.1'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
# today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
# today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This patterns also effect to html_static_path and html_extra_path
|
||||
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
# default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
# add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = False
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
# keep_warnings = False
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = True
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'alabaster'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
# html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
# html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents.
|
||||
# "<project> v<release> documentation" by default.
|
||||
# html_title = 'DeviceHub v0.1'
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
# html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
# html_logo = None
|
||||
|
||||
# The name of an image file (relative to this directory) to use as a favicon of
|
||||
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
# html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
# html_extra_path = []
|
||||
|
||||
# If not None, a 'Last updated on:' timestamp is inserted at every page
|
||||
# bottom, using the given strftime format.
|
||||
# The empty string is equivalent to '%b %d, %Y'.
|
||||
# html_last_updated_fmt = None
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
# html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
# html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
# html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
# html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
# html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
html_split_index = True
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
# html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
# html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
# html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
# html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
# html_file_suffix = None
|
||||
|
||||
# Language to be used for generating the HTML full-text search index.
|
||||
# Sphinx supports the following languages:
|
||||
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
|
||||
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
|
||||
# html_search_language = 'en'
|
||||
|
||||
# A dictionary with options for the search language support, empty by default.
|
||||
# 'ja' uses this config value.
|
||||
# 'zh' user can custom change `jieba` dictionary path.
|
||||
# html_search_options = {'type': 'default'}
|
||||
|
||||
# The name of a javascript file (relative to the configuration directory) that
|
||||
# implements a search results scorer. If empty, the default will be used.
|
||||
# html_search_scorer = 'scorer.js'
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'DeviceHubdoc'
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
# 'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
# 'figure_align': 'htbp',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'DeviceHub.tex', 'DeviceHub Documentation',
|
||||
'eReuse.org team', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
# latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
# latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
# latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'devicehub', 'DeviceHub Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
# man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'DeviceHub', 'DeviceHub Documentation',
|
||||
author, 'DeviceHub', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
# texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
# texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
# texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
# texinfo_no_detailmenu = False
|
||||
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {'https://docs.python.org/': None}
|
||||
|
||||
autodoc_default_flags = ['members', 'private-members']
|
||||
autodoc_member_order = 'bysource'
|
|
@ -0,0 +1,20 @@
|
|||
.. Dependencies: sphinx sphinxcontrib-httpdomain
|
||||
.. title:: DeviceHub
|
||||
|
||||
.. image:: https://www.ereuse.org/files/2017/04/DeviceHub-logo-V2.svg
|
||||
:height: 100px
|
||||
:alt: DeviceHub logo
|
||||
|
||||
This is the documentation and API of the
|
||||
`eReuse.org DeviceHub <https://github.com/eReuse/DeviceHub>`_.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 4
|
||||
|
||||
snapshot
|
||||
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
|
||||
.. image::
|
|
@ -0,0 +1,23 @@
|
|||
Snapshot
|
||||
========
|
||||
The Snapshot updates the state of the device with information about its components and events
|
||||
performed on them.
|
||||
|
||||
When receiving a Snapshot, the DeviceHub creates, adds and removes components to match the
|
||||
Snapshot. For example, if a Snapshot of a computer contains a new component, the system will
|
||||
search for the component in its database and, if not found, create it, and finally add it
|
||||
to the computer.
|
||||
|
||||
Snapshots can bundle some events, usually tests and hard-drive erasures. In such case the
|
||||
DeviceHub will save those events.
|
||||
|
||||
A Snapshot is used with Remove to represent changes in components for a device:
|
||||
1. A device is created in the database always with a Snapshot. If this device had components,
|
||||
they are created (if they did not already exist) at the same time with the same Snapshot.
|
||||
2. Some time later, a new Snapshot updates component information. If, for example, this new Snapshot
|
||||
doesn't have a component, it means that this component is not present anymore in the device,
|
||||
thus removing it from it. Then we have that:
|
||||
- Components to add: snapshot2.components - snapshot1.components
|
||||
- Components to remove: snapshot1.components - snapshot2.components
|
||||
When adding a component, there may be the case this component existed before and it was
|
||||
inside another device. In such case, DeviceHub will perform ``Remove`` on the old parent.
|
|
@ -3,7 +3,8 @@ from distutils.version import StrictVersion
|
|||
from ereuse_devicehub.resources.device import ComponentDef, ComputerDef, DesktopDef, DeviceDef, \
|
||||
GraphicCardDef, HardDriveDef, LaptopDef, MicrotowerDef, MotherboardDef, NetbookDef, \
|
||||
NetworkAdapterDef, ProcessorDef, RamModuleDef, ServerDef
|
||||
from ereuse_devicehub.resources.event import EventDef, SnapshotDef
|
||||
from ereuse_devicehub.resources.event import EventDef, SnapshotDef, TestDef, TestHardDriveDef, \
|
||||
AddDef, RemoveDef
|
||||
from ereuse_devicehub.resources.user import UserDef
|
||||
from teal.config import Config
|
||||
|
||||
|
@ -12,7 +13,8 @@ class DevicehubConfig(Config):
|
|||
RESOURCE_DEFINITIONS = (
|
||||
DeviceDef, ComputerDef, DesktopDef, LaptopDef, NetbookDef, ServerDef, MicrotowerDef,
|
||||
ComponentDef, GraphicCardDef, HardDriveDef, MotherboardDef, NetworkAdapterDef,
|
||||
RamModuleDef, ProcessorDef, UserDef, EventDef, SnapshotDef
|
||||
RamModuleDef, ProcessorDef, UserDef, EventDef, AddDef, RemoveDef, SnapshotDef,
|
||||
TestDef, TestHardDriveDef
|
||||
)
|
||||
PASSWORD_SCHEMES = {'pbkdf2_sha256'}
|
||||
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/dh-db1'
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
from contextlib import suppress
|
||||
from operator import attrgetter
|
||||
from typing import Dict, Set
|
||||
|
||||
from ereuse_utils.naming import Naming
|
||||
|
@ -28,6 +29,14 @@ class Device(Thing):
|
|||
height = Column(Float(precision=3, decimal_return_scale=3),
|
||||
check_range('height', 0.1, 3)) # type: float
|
||||
|
||||
@property
|
||||
def events(self) -> list:
|
||||
"""All the events performed to the device."""
|
||||
# Tried to use chain() but Marshmallow doesn't like it :-(
|
||||
events = self.events_multiple + self.events_one
|
||||
events.sort(key=attrgetter('id'))
|
||||
return events
|
||||
|
||||
def __init__(self, *args, **kw) -> None:
|
||||
super().__init__(*args, **kw)
|
||||
with suppress(TypeError):
|
||||
|
@ -59,14 +68,17 @@ class Device(Thing):
|
|||
extensions/declarative/api.html
|
||||
#sqlalchemy.ext.declarative.declared_attr>`_
|
||||
"""
|
||||
args = {POLYMORPHIC_ID: cls.__name__}
|
||||
if cls.__name__ == 'Device':
|
||||
args = {POLYMORPHIC_ID: cls.t}
|
||||
if cls.t == 'Device':
|
||||
args[POLYMORPHIC_ON] = cls.type
|
||||
return args
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.id < other.id
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return '<{0.t} {0.id!r} model={0.model!r} S/N={0.serial_number!r}>'.format(self)
|
||||
|
||||
|
||||
class Computer(Device):
|
||||
id = Column(BigInteger, ForeignKey(Device.id), primary_key=True) # type: int
|
||||
|
@ -97,7 +109,10 @@ class Component(Device):
|
|||
|
||||
parent_id = Column(BigInteger, ForeignKey('computer.id'))
|
||||
parent = relationship(Computer,
|
||||
backref=backref('components', lazy=True, cascade=CASCADE),
|
||||
backref=backref('components',
|
||||
lazy=True,
|
||||
cascade=CASCADE,
|
||||
order_by=lambda: Component.id),
|
||||
primaryjoin='Component.parent_id == Computer.id') # type: Device
|
||||
|
||||
def similar_one(self, parent: Computer, blacklist: Set[int]) -> 'Component':
|
||||
|
@ -119,19 +134,29 @@ class Component(Device):
|
|||
raise ResourceNotFound(self.type)
|
||||
return component
|
||||
|
||||
@property
|
||||
def events(self) -> list:
|
||||
events = super().events
|
||||
events.extend(self.events_components)
|
||||
events.sort(key=attrgetter('id'))
|
||||
return events
|
||||
|
||||
class GraphicCard(Component):
|
||||
id = Column(BigInteger, ForeignKey(Component.id), primary_key=True) # type: int
|
||||
|
||||
class JoinedComponentTableMixin:
|
||||
@declared_attr
|
||||
def id(cls):
|
||||
return Column(BigInteger, ForeignKey(Component.id), primary_key=True)
|
||||
|
||||
|
||||
class GraphicCard(JoinedComponentTableMixin, Component):
|
||||
memory = Column(SmallInteger, check_range('memory', min=1, max=10000)) # type: int
|
||||
|
||||
|
||||
class HardDrive(Component):
|
||||
id = Column(BigInteger, ForeignKey(Component.id), primary_key=True) # type: int
|
||||
class HardDrive(JoinedComponentTableMixin, Component):
|
||||
size = Column(Integer, check_range('size', min=1, max=10 ** 8)) # type: int
|
||||
|
||||
|
||||
class Motherboard(Component):
|
||||
id = Column(BigInteger, ForeignKey(Component.id), primary_key=True) # type: int
|
||||
class Motherboard(JoinedComponentTableMixin, Component):
|
||||
slots = Column(SmallInteger, check_range('slots')) # type: int
|
||||
usb = Column(SmallInteger, check_range('usb')) # type: int
|
||||
firewire = Column(SmallInteger, check_range('firewire')) # type: int
|
||||
|
@ -139,19 +164,16 @@ class Motherboard(Component):
|
|||
pcmcia = Column(SmallInteger, check_range('pcmcia')) # type: int
|
||||
|
||||
|
||||
class NetworkAdapter(Component):
|
||||
id = Column(BigInteger, ForeignKey(Component.id), primary_key=True) # type: int
|
||||
class NetworkAdapter(JoinedComponentTableMixin, Component):
|
||||
speed = Column(SmallInteger, check_range('speed', min=10, max=10000)) # type: int
|
||||
|
||||
|
||||
class Processor(Component):
|
||||
id = Column(BigInteger, ForeignKey(Component.id), primary_key=True) # type: int
|
||||
class Processor(JoinedComponentTableMixin, Component):
|
||||
speed = Column(Float, check_range('speed', 0.1, 15))
|
||||
cores = Column(SmallInteger, check_range('cores', 1, 10))
|
||||
address = Column(SmallInteger, check_range('address', 8, 256))
|
||||
|
||||
|
||||
class RamModule(Component):
|
||||
id = Column(BigInteger, ForeignKey(Component.id), primary_key=True) # type: int
|
||||
class RamModule(JoinedComponentTableMixin, Component):
|
||||
size = Column(SmallInteger, check_range('size', min=128, max=17000))
|
||||
speed = Column(Float, check_range('speed', min=100, max=10000))
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
from marshmallow import post_dump
|
||||
from marshmallow.fields import Float, Integer, Str
|
||||
from marshmallow.validate import Length, OneOf, Range
|
||||
|
||||
|
@ -32,16 +31,6 @@ class Device(Thing):
|
|||
unit=UnitCodes.m,
|
||||
description='The height of the device in meters.')
|
||||
events = NestedOn('Event', many=True, dump_only=True)
|
||||
events_one = NestedOn('Event', many=True, dump_only=True, description='Not used.')
|
||||
events_components = NestedOn('Event', many=True, dump_only=True, description='Not used.')
|
||||
|
||||
@post_dump
|
||||
def merge_events(self, data: dict) -> dict:
|
||||
if isinstance(data.get('events_one', None), list):
|
||||
data.setdefault('events', []).extend(data.pop('events_one'))
|
||||
if isinstance(data.get('events_components', None), list):
|
||||
data.setdefault('events', []).extend(data.pop('events_components'))
|
||||
return data
|
||||
|
||||
|
||||
class Computer(Device):
|
||||
|
|
|
@ -158,22 +158,17 @@ class Sync:
|
|||
be re-added.
|
||||
:return: A list of Add / Remove events.
|
||||
"""
|
||||
# Note that we create the Remove events before the Add ones
|
||||
events = []
|
||||
old_components = set(device.components)
|
||||
|
||||
adding = components - old_components
|
||||
if adding:
|
||||
add = Add(device=device, components=list(adding))
|
||||
|
||||
# For the components we are adding, let's remove them from their old parents
|
||||
def g_parent(component: Component) -> int:
|
||||
return component.parent or Computer(id=0) # Computer with id 0 is our Identity
|
||||
|
||||
for parent, _components in groupby(sorted(add.components, key=g_parent), key=g_parent):
|
||||
if parent.id != 0:
|
||||
for parent, _components in groupby(sorted(adding, key=g_parent), key=g_parent):
|
||||
if parent.id != 0: # Is not Computer Identity
|
||||
events.append(Remove(device=parent, components=list(_components)))
|
||||
events.append(add)
|
||||
|
||||
removing = old_components - components
|
||||
if removing:
|
||||
events.append(Remove(device=device, components=list(removing)))
|
||||
return events
|
||||
|
|
|
@ -6,9 +6,9 @@ class DeviceView(View):
|
|||
def one(self, id: int):
|
||||
"""Gets one device."""
|
||||
device = Device.query.filter_by(id=id).one()
|
||||
return self.schema.jsonify_polymorphic(device)
|
||||
return self.schema.jsonify(device)
|
||||
|
||||
def find(self, args: dict):
|
||||
"""Gets many devices"""
|
||||
devices = Device.query.all()
|
||||
return self.schema.jsonify_polymorphic_many(devices)
|
||||
return self.schema.jsonify(devices, many=True)
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
from ereuse_devicehub.resources.event.schemas import Snapshot, Event
|
||||
from ereuse_devicehub.resources.event.schemas import Add, Event, Remove, Snapshot, Test, \
|
||||
TestHardDrive
|
||||
from ereuse_devicehub.resources.event.views import EventView, SnapshotView
|
||||
from teal.resource import Converters, Resource
|
||||
|
||||
|
@ -10,6 +11,22 @@ class EventDef(Resource):
|
|||
ID_CONVERTER = Converters.int
|
||||
|
||||
|
||||
class AddDef(EventDef):
|
||||
SCHEMA = Add
|
||||
|
||||
|
||||
class RemoveDef(EventDef):
|
||||
SCHEMA = Remove
|
||||
|
||||
|
||||
class SnapshotDef(EventDef):
|
||||
SCHEMA = Snapshot
|
||||
VIEW = SnapshotView
|
||||
|
||||
|
||||
class TestDef(EventDef):
|
||||
SCHEMA = Test
|
||||
|
||||
|
||||
class TestHardDriveDef(TestDef):
|
||||
SCHEMA = TestHardDrive
|
||||
|
|
|
@ -10,7 +10,7 @@ from sqlalchemy.orm import backref, relationship, validates
|
|||
from sqlalchemy_utils import ColorType
|
||||
|
||||
from ereuse_devicehub.db import db
|
||||
from ereuse_devicehub.resources.device.models import Device
|
||||
from ereuse_devicehub.resources.device.models import Component, Device
|
||||
from ereuse_devicehub.resources.event.enums import Appearance, Bios, Functionality, Orientation, \
|
||||
SoftwareType, StepTypes, TestHardDriveLength
|
||||
from ereuse_devicehub.resources.models import STR_BIG_SIZE, STR_SIZE, STR_SM_SIZE, Thing, \
|
||||
|
@ -49,10 +49,12 @@ class Event(Thing):
|
|||
author = relationship(User,
|
||||
backref=backref('events', lazy=True),
|
||||
primaryjoin=author_id == User.id)
|
||||
|
||||
components = relationship(Device,
|
||||
backref=backref('events_components', lazy=True),
|
||||
secondary=lambda: EventComponent.__table__)
|
||||
components = relationship(Component,
|
||||
backref=backref('events_components',
|
||||
lazy=True,
|
||||
order_by=lambda: Event.id),
|
||||
secondary=lambda: EventComponent.__table__,
|
||||
order_by=lambda: Device.id)
|
||||
|
||||
@declared_attr
|
||||
def __mapper_args__(cls):
|
||||
|
@ -63,8 +65,8 @@ class Event(Thing):
|
|||
extensions/declarative/api.html
|
||||
#sqlalchemy.ext.declarative.declared_attr>`_
|
||||
"""
|
||||
args = {POLYMORPHIC_ID: cls.__name__}
|
||||
if cls.__name__ == 'Event':
|
||||
args = {POLYMORPHIC_ID: cls.t}
|
||||
if cls.t == 'Event':
|
||||
args[POLYMORPHIC_ON] = cls.type
|
||||
if JoinedTableMixin in cls.mro():
|
||||
args[INHERIT_COND] = cls.id == Event.id
|
||||
|
@ -79,17 +81,29 @@ class EventComponent(db.Model):
|
|||
class EventWithOneDevice(Event):
|
||||
device_id = Column(BigInteger, ForeignKey(Device.id), nullable=False)
|
||||
device = relationship(Device,
|
||||
backref=backref('events_one', lazy=True, cascade=CASCADE),
|
||||
backref=backref('events_one',
|
||||
lazy=True,
|
||||
cascade=CASCADE,
|
||||
order_by=lambda: EventWithOneDevice.id),
|
||||
primaryjoin=Device.id == device_id)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return '<{0.t} {0.id!r} device={0.device!r}>'.format(self)
|
||||
|
||||
|
||||
class EventWithMultipleDevices(Event):
|
||||
"""
|
||||
Note that these events are not deleted when a device is deleted.
|
||||
"""
|
||||
devices = relationship(Device,
|
||||
backref=backref('events', lazy=True),
|
||||
secondary=lambda: EventDevice.__table__)
|
||||
backref=backref('events_multiple',
|
||||
lazy=True,
|
||||
order_by=lambda: EventWithMultipleDevices.id),
|
||||
secondary=lambda: EventDevice.__table__,
|
||||
order_by=lambda: Device.id)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return '<{0.t} {0.id!r} devices={0.devices!r}>'.format(self)
|
||||
|
||||
|
||||
class EventDevice(db.Model):
|
||||
|
@ -192,6 +206,7 @@ class Test(JoinedTableMixin, EventWithOneDevice):
|
|||
|
||||
|
||||
class TestHardDrive(Test):
|
||||
id = Column(BigInteger, ForeignKey(Test.id), primary_key=True)
|
||||
length = Column(DBEnum(TestHardDriveLength), nullable=False) # todo from type
|
||||
status = Column(Unicode(STR_SIZE), nullable=False)
|
||||
lifetime = Column(Interval, nullable=False)
|
||||
|
|
|
@ -30,7 +30,7 @@ class Event(Thing):
|
|||
'hardware without margin of doubt.')
|
||||
incidence = Boolean(default=False,
|
||||
description='Was something wrong in this event?')
|
||||
snapshot = NestedOn('Snapshot', dump_only=True, only='id')
|
||||
snapshot = NestedOn('Snapshot', dump_only=True)
|
||||
description = String(default='', description='A comment about the event.')
|
||||
components = NestedOn(Component, dump_only=True, many=True)
|
||||
|
||||
|
@ -40,7 +40,7 @@ class EventWithOneDevice(Event):
|
|||
|
||||
|
||||
class EventWithMultipleDevices(Event):
|
||||
device = NestedOn(Device, many=True, only='id')
|
||||
devices = NestedOn(Device, many=True, only='id')
|
||||
|
||||
|
||||
class Add(EventWithOneDevice):
|
||||
|
@ -124,8 +124,19 @@ class Inventory(Schema):
|
|||
|
||||
|
||||
class Snapshot(EventWithOneDevice):
|
||||
"""
|
||||
The Snapshot updates the state of the device with information about
|
||||
its components and events performed at them.
|
||||
|
||||
See docs for more info.
|
||||
"""
|
||||
device = NestedOn(Device) # todo and when dumping?
|
||||
components = NestedOn(Component, many=True)
|
||||
components = NestedOn(Component,
|
||||
many=True,
|
||||
description='A list of components that are inside of the device'
|
||||
'at the moment of this Snapshot.'
|
||||
'Order is preserved, so the component num 0 when'
|
||||
'submitting is the component num 0 when returning it back.')
|
||||
uuid = UUID(required=True)
|
||||
version = Version(required=True, description='The version of the SnapshotSoftware.')
|
||||
software = EnumField(SoftwareType,
|
||||
|
@ -138,7 +149,7 @@ class Snapshot(EventWithOneDevice):
|
|||
color = Color(description='Main color of the device.')
|
||||
orientation = EnumField(Orientation, description='Is the device main stand wider or larger?')
|
||||
force_creation = Boolean(data_key='forceCreation')
|
||||
events = NestedOn(Event, many=True)
|
||||
events = NestedOn(Event, many=True, dump_only=True)
|
||||
|
||||
@validates_schema
|
||||
def validate_workbench_version(self, data: dict):
|
||||
|
|
|
@ -12,7 +12,8 @@ from teal.resource import View
|
|||
class EventView(View):
|
||||
def one(self, id: int):
|
||||
"""Gets one event."""
|
||||
return Event.query.filter_by(id=id).one()
|
||||
event = Event.query.filter_by(id=id).one()
|
||||
return self.schema.jsonify(event)
|
||||
|
||||
|
||||
SUPPORTED_WORKBENCH = StrictVersion('11.0')
|
||||
|
@ -20,7 +21,11 @@ SUPPORTED_WORKBENCH = StrictVersion('11.0')
|
|||
|
||||
class SnapshotView(View):
|
||||
def post(self):
|
||||
"""Creates a Snapshot."""
|
||||
"""
|
||||
Performs a Snapshot.
|
||||
|
||||
See `Snapshot` section in docs for more info.
|
||||
"""
|
||||
s = request.get_json()
|
||||
# Note that if we set the device / components into the snapshot
|
||||
# model object, when we flush them to the db we will flush
|
||||
|
@ -31,8 +36,18 @@ class SnapshotView(View):
|
|||
snapshot = Snapshot(**s)
|
||||
snapshot.device, snapshot.events = Sync.run(device, components, snapshot.force_creation)
|
||||
snapshot.components = snapshot.device.components
|
||||
# commit will change the order of the components by what
|
||||
# the DB wants. Let's get a copy of the list so we preserve
|
||||
# order
|
||||
ordered_components = [c for c in snapshot.components]
|
||||
db.session.add(snapshot)
|
||||
db.session.flush() # Take to DB so we get db-generated values
|
||||
db.session.commit()
|
||||
# todo we are setting snapshot dirty again with this components but
|
||||
# we do not want to update it.
|
||||
# The real solution is https://stackoverflow.com/questions/
|
||||
# 24480581/set-the-insert-order-of-a-many-to-many-sqlalchemy-
|
||||
# flask-app-sqlite-db?noredirect=1&lq=1
|
||||
snapshot.components = ordered_components
|
||||
ret = self.schema.jsonify(snapshot) # transform it back
|
||||
ret.status_code = 201
|
||||
return ret
|
||||
|
|
|
@ -24,3 +24,6 @@ class User(Thing):
|
|||
"""
|
||||
name = Column(Unicode(length=STR_SIZE))
|
||||
token = Column(UUID(as_uuid=True), default=uuid4, unique=True)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return '<{0.t} {0.id!r} email={0.email!r}>'.format(self)
|
||||
|
|
|
@ -10,7 +10,14 @@ components:
|
|||
type: 'Motherboard'
|
||||
- manufacturer: 'p1c2m'
|
||||
serialNumber: 'p1c2s'
|
||||
model: 'p1'
|
||||
model: 'p1c2'
|
||||
speed: 1.23
|
||||
cores: 2
|
||||
type: 'Processor'
|
||||
condition:
|
||||
appearance: 'A'
|
||||
functionality: 'B'
|
||||
elapsed: 25
|
||||
software: 'Workbench'
|
||||
uuid: 'f2e02261-87a1-4a50-b9b7-92c0e476e5f2'
|
||||
version: '11.0'
|
|
@ -1,5 +1,5 @@
|
|||
device:
|
||||
manufactuer: 'p1'
|
||||
manufacturer: 'p1'
|
||||
serialNumber: 'p1'
|
||||
model: 'p1'
|
||||
type: 'Desktop'
|
||||
|
@ -7,7 +7,7 @@ secured: False
|
|||
components:
|
||||
- manufacturer: 'p1c2m'
|
||||
serialNumber: 'p1c2s'
|
||||
model: 'p1'
|
||||
model: 'p1c2'
|
||||
type: 'Processor'
|
||||
cores: 2
|
||||
speed: 1.23
|
||||
|
@ -15,3 +15,10 @@ components:
|
|||
serialNumber: 'p1c3s'
|
||||
type: 'GraphicCard'
|
||||
memory: 1.5
|
||||
condition:
|
||||
appearance: 'C'
|
||||
functionality: 'C'
|
||||
elapsed: 30
|
||||
software: 'Workbench'
|
||||
uuid: '3be271b6-5ef4-47d8-8237-5e1133eebfc6'
|
||||
version: '11.0'
|
|
@ -1,5 +1,5 @@
|
|||
device:
|
||||
manufactuer: 'p1'
|
||||
manufacturer: 'p1'
|
||||
serialNumber: 'p1'
|
||||
model: 'p1'
|
||||
type: 'Desktop'
|
||||
|
@ -13,3 +13,10 @@ components:
|
|||
serialNumber: 'p1c3s'
|
||||
type: 'GraphicCard'
|
||||
memory: 1.5
|
||||
condition:
|
||||
appearance: 'A'
|
||||
functionality: 'A'
|
||||
elapsed: 25
|
||||
software: 'Workbench'
|
||||
uuid: 'fd007eb4-48e3-454a-8763-169491904c6e'
|
||||
version: '11.0'
|
|
@ -1,3 +1,6 @@
|
|||
from datetime import timedelta
|
||||
from uuid import UUID
|
||||
|
||||
import pytest
|
||||
|
||||
from ereuse_devicehub.client import UserClient
|
||||
|
@ -8,7 +11,8 @@ from ereuse_devicehub.resources.device.models import Component, Computer, Deskto
|
|||
GraphicCard, Laptop, Microtower, Motherboard, NetworkAdapter
|
||||
from ereuse_devicehub.resources.device.schemas import Device as DeviceS
|
||||
from ereuse_devicehub.resources.device.sync import Sync
|
||||
from ereuse_devicehub.resources.event.models import Add, Remove
|
||||
from ereuse_devicehub.resources.event.models import Remove, Test
|
||||
from ereuse_devicehub.resources.user import User
|
||||
from teal.db import ResourceNotFound
|
||||
from tests.conftest import file
|
||||
|
||||
|
@ -58,7 +62,7 @@ def test_device_schema():
|
|||
"""Ensures the user does not upload non-writable or extra fields."""
|
||||
device_s = DeviceS()
|
||||
device_s.load({'serialNumber': 'foo1', 'model': 'foo', 'manufacturer': 'bar2'})
|
||||
device_s.dump({'id': 1})
|
||||
device_s.dump(Device(id=1))
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('app_context')
|
||||
|
@ -132,16 +136,12 @@ def test_add_remove():
|
|||
# Test:
|
||||
# pc has only c3
|
||||
events = Sync.add_remove(device=pc, components={c3, c4})
|
||||
assert len(events) == 3
|
||||
db.session.add_all(events)
|
||||
db.session.commit() # We enforce the appliance of order_by
|
||||
assert len(events) == 1
|
||||
assert isinstance(events[0], Remove)
|
||||
assert events[0].device == pc2
|
||||
assert events[0].components == [c3]
|
||||
assert isinstance(events[1], Add)
|
||||
assert events[1].device == pc
|
||||
assert set(events[1].components) == {c3, c4}
|
||||
assert isinstance(events[2], Remove)
|
||||
assert events[2].device == pc
|
||||
assert set(events[2].components) == {c1, c2}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('app_context')
|
||||
|
@ -185,9 +185,19 @@ def test_get_device(app: Devicehub, user: UserClient):
|
|||
GraphicCard(model='c2mo', manufacturer='c2ma', memory=1500)
|
||||
]
|
||||
db.session.add(pc)
|
||||
db.session.add(Test(device=pc,
|
||||
elapsed=timedelta(seconds=4),
|
||||
success=True,
|
||||
author=User(email='bar@bar.com')))
|
||||
db.session.commit()
|
||||
pc, _ = user.get(res=Device, item=1)
|
||||
assert pc['events'] == []
|
||||
assert len(pc['events']) == 1
|
||||
assert pc['events'][0]['type'] == 'Test'
|
||||
assert pc['events'][0]['id'] == 1
|
||||
assert pc['events'][0]['device'] == 1
|
||||
assert pc['events'][0]['elapsed'] == 4
|
||||
assert pc['events'][0]['success'] == True
|
||||
assert UUID(pc['events'][0]['author'])
|
||||
assert 'events_components' not in pc, 'events_components are internal use only'
|
||||
assert 'events_one' not in pc, 'they are internal use only'
|
||||
assert 'author' not in pc
|
||||
|
|
|
@ -8,8 +8,8 @@ from ereuse_devicehub.client import UserClient
|
|||
from ereuse_devicehub.db import db
|
||||
from ereuse_devicehub.devicehub import Devicehub
|
||||
from ereuse_devicehub.resources.device.models import Device, Microtower
|
||||
from ereuse_devicehub.resources.event.models import Appearance, Bios, Functionality, Snapshot, \
|
||||
SnapshotRequest, SoftwareType
|
||||
from ereuse_devicehub.resources.event.models import Appearance, Bios, Event, Functionality, \
|
||||
Snapshot, SnapshotRequest, SoftwareType
|
||||
from ereuse_devicehub.resources.user.models import User
|
||||
from tests.conftest import file
|
||||
|
||||
|
@ -36,18 +36,28 @@ def assert_similar_components(components1: List[dict], components2: List[dict]):
|
|||
|
||||
def snapshot_and_check(user: UserClient,
|
||||
input_snapshot: dict,
|
||||
num_events: int = 0,
|
||||
event_types: tuple or list = tuple(),
|
||||
perform_second_snapshot=True) -> dict:
|
||||
"""
|
||||
|
||||
P
|
||||
"""
|
||||
snapshot, _ = user.post(res=Snapshot, data=input_snapshot)
|
||||
assert len(snapshot['events']) == num_events
|
||||
assert tuple(e['type'] for e in snapshot['events']) == event_types
|
||||
# Ensure there is no Remove event after the first Add
|
||||
found_add = False
|
||||
for event in snapshot['events']:
|
||||
if event['type'] == 'Add':
|
||||
found_add = True
|
||||
if found_add:
|
||||
assert event['type'] != 'Receive', 'All Remove events must be before the Add ones'
|
||||
assert input_snapshot['device']
|
||||
assert_similar_device(input_snapshot['device'], snapshot['device'])
|
||||
assert_similar_components(input_snapshot['components'], snapshot['components'])
|
||||
assert all(c['parent'] == snapshot['device']['id'] for c in snapshot['components']), \
|
||||
'Components must be in their parent'
|
||||
if perform_second_snapshot:
|
||||
return snapshot_and_check(user, input_snapshot, num_events, False)
|
||||
input_snapshot['uuid'] = uuid4()
|
||||
return snapshot_and_check(user, input_snapshot, perform_second_snapshot=False)
|
||||
else:
|
||||
return snapshot
|
||||
|
||||
|
@ -99,7 +109,7 @@ def test_snapshot_post(user: UserClient):
|
|||
Tests the post snapshot endpoint (validation, etc)
|
||||
and data correctness.
|
||||
"""
|
||||
snapshot = snapshot_and_check(user, file('basic.snapshot'))
|
||||
snapshot = snapshot_and_check(user, file('basic.snapshot'), perform_second_snapshot=False)
|
||||
assert snapshot['software'] == 'Workbench'
|
||||
assert snapshot['version'] == '11.0'
|
||||
assert snapshot['uuid'] == 'f5efd26e-8754-46bc-87bf-fbccc39d60d9'
|
||||
|
@ -111,9 +121,104 @@ def test_snapshot_post(user: UserClient):
|
|||
|
||||
|
||||
def test_snapshot_add_remove(user: UserClient):
|
||||
s1 = file('1-device-with-components.snapshot')
|
||||
snapshot_and_check(user, s1)
|
||||
def get_events_info(events: List[dict]) -> tuple:
|
||||
return tuple(
|
||||
(
|
||||
e['id'],
|
||||
e['type'],
|
||||
[c['serialNumber'] for c in e['components']],
|
||||
e.get('snapshot', {}).get('id', None)
|
||||
)
|
||||
for e in (user.get(res=Event, item=e['id'])[0] for e in events)
|
||||
)
|
||||
|
||||
# We add the first device (2 times). The distribution of components
|
||||
# (represented with their S/N) should be:
|
||||
# PC 1: p1c1s, p1c2s, p1c3s. PC 2: ø
|
||||
s1 = file('1-device-with-components.snapshot')
|
||||
snapshot1 = snapshot_and_check(user, s1, perform_second_snapshot=False)
|
||||
pc1_id = snapshot1['device']['id']
|
||||
pc1, _ = user.get(res=Device, item=pc1_id)
|
||||
# Parent contains components
|
||||
assert tuple(c['serialNumber'] for c in pc1['components']) == ('p1c1s', 'p1c2s', 'p1c3s')
|
||||
# Components contain parent
|
||||
assert all(c['parent'] == pc1_id for c in pc1['components'])
|
||||
# pc has Snapshot as event
|
||||
assert len(pc1['events']) == 1
|
||||
assert pc1['events'][0]['type'] == Snapshot.t
|
||||
# p1c1s has Snapshot
|
||||
p1c1s, _ = user.get(res=Device, item=pc1['components'][0]['id'])
|
||||
assert tuple(e['type'] for e in p1c1s['events']) == ('Snapshot',)
|
||||
|
||||
# We register a new device
|
||||
# It has the processor of the first one (p1c2s)
|
||||
# PC 1: p1c1s, p1c3s. PC 2: p2c1s, p1c2s
|
||||
# Events PC1: Snapshot, Remove. PC2: Snapshot
|
||||
s2 = file('2-second-device-with-components-of-first.snapshot')
|
||||
# num_events = 2 = Remove, Add
|
||||
snapshot2 = snapshot_and_check(user, s2, event_types=('Remove', ),
|
||||
perform_second_snapshot=False)
|
||||
pc2_id = snapshot2['device']['id']
|
||||
pc1, _ = user.get(res=Device, item=pc1_id)
|
||||
pc2, _ = user.get(res=Device, item=pc2_id)
|
||||
# PC1
|
||||
assert tuple(c['serialNumber'] for c in pc1['components']) == ('p1c1s', 'p1c3s')
|
||||
assert all(c['parent'] == pc1_id for c in pc1['components'])
|
||||
assert tuple(e['type'] for e in pc1['events']) == ('Snapshot', 'Remove')
|
||||
# PC2
|
||||
assert tuple(c['serialNumber'] for c in pc2['components']) == ('p1c2s', 'p2c1s')
|
||||
assert all(c['parent'] == pc2_id for c in pc2['components'])
|
||||
assert tuple(e['type'] for e in pc2['events']) == ('Snapshot', )
|
||||
# p1c2s has two Snapshots, a Remove and an Add
|
||||
p1c2s, _ = user.get(res=Device, item=pc2['components'][0]['id'])
|
||||
assert tuple(e['type'] for e in p1c2s['events']) == ('Snapshot', 'Snapshot', 'Remove')
|
||||
|
||||
# We register the first device again, but removing motherboard
|
||||
# and moving processor from the second device to the first.
|
||||
# We have created 1 Remove (from PC2's processor back to PC1)
|
||||
# PC 0: p1c2s, p1c3s. PC 1: p2c1s
|
||||
s3 = file('3-first-device-but-removing-motherboard-and-adding-processor-from-2.snapshot')
|
||||
snapshot_and_check(user, s3, ('Remove', ), perform_second_snapshot=False)
|
||||
pc1, _ = user.get(res=Device, item=pc1_id)
|
||||
pc2, _ = user.get(res=Device, item=pc2_id)
|
||||
# PC1
|
||||
assert {c['serialNumber'] for c in pc1['components']} == {'p1c2s', 'p1c3s'}
|
||||
assert all(c['parent'] == pc1_id for c in pc1['components'])
|
||||
assert get_events_info(pc1['events']) == (
|
||||
# id, type, components, snapshot
|
||||
(1, 'Snapshot', ['p1c1s', 'p1c2s', 'p1c3s'], None), # first Snapshot1
|
||||
(3, 'Remove', ['p1c2s'], 2), # Remove Processor in Snapshot2
|
||||
(4, 'Snapshot', ['p1c2s', 'p1c3s'], None) # This Snapshot3
|
||||
)
|
||||
# PC2
|
||||
assert tuple(c['serialNumber'] for c in pc2['components']) == ('p2c1s',)
|
||||
assert all(c['parent'] == pc2_id for c in pc2['components'])
|
||||
assert tuple(e['type'] for e in pc2['events']) == (
|
||||
'Snapshot', # Second Snapshot
|
||||
'Remove' # the processor we added in 2.
|
||||
)
|
||||
# p1c2s has Snapshot, Remove and Add
|
||||
p1c2s, _ = user.get(res=Device, item=pc1['components'][0]['id'])
|
||||
assert get_events_info(p1c2s['events']) == (
|
||||
(1, 'Snapshot', ['p1c1s', 'p1c2s', 'p1c3s'], None), # First Snapshot to PC1
|
||||
(2, 'Snapshot', ['p1c2s', 'p2c1s'], None), # Second Snapshot to PC2
|
||||
(3, 'Remove', ['p1c2s'], 2), # ...which caused p1c2s to be removed form PC1
|
||||
(4, 'Snapshot', ['p1c2s', 'p1c3s'], None), # The third Snapshot to PC1
|
||||
(5, 'Remove', ['p1c2s'], 4) # ...which caused p1c2 to be removed from PC2
|
||||
)
|
||||
|
||||
# We register the first device but without the processor,
|
||||
# adding a graphic card and adding a new component
|
||||
s4 = file('4-first-device-but-removing-processor.snapshot-and-adding-graphic-card')
|
||||
snapshot_and_check(user, s4, perform_second_snapshot=False)
|
||||
pc1, _ = user.get(res=Device, item=pc1_id)
|
||||
pc2, _ = user.get(res=Device, item=pc2_id)
|
||||
# PC 0: p1c3s, p1c4s. PC1: p2c1s
|
||||
assert {c['serialNumber'] for c in pc1['components']} == {'p1c3s', 'p1c4s'}
|
||||
assert all(c['parent'] == pc1_id for c in pc1['components'])
|
||||
# This last Snapshot only
|
||||
assert get_events_info(pc1['events'])[-1] == (6, 'Snapshot', ['p1c3s', 'p1c4s'], None)
|
||||
# PC2
|
||||
# We haven't changed PC2
|
||||
assert tuple(c['serialNumber'] for c in pc2['components']) == ('p2c1s',)
|
||||
assert all(c['parent'] == pc2_id for c in pc2['components'])
|
||||
|
|
Reference in New Issue