Update documentation

Correct sphinx warning
Jerome Kieffer 2023-01-05 10:36:34 +01:00
parent 317f706d5d
commit dc7acee33a
15 changed files with 64 additions and 121 deletions

View File

@@ -73,14 +73,6 @@ pyFAI.ext.relabel module
:undoc-members:
:show-inheritance:
pyFAI.ext.setup module
----------------------
.. automodule:: pyFAI.ext.setup
:members:
:undoc-members:
:show-inheritance:
pyFAI.ext.sparse_builder module
-------------------------------
@@ -129,14 +121,6 @@ pyFAI.ext.splitPixel module
:undoc-members:
:show-inheritance:
pyFAI.ext.splitPixelFull module
-------------------------------
.. automodule:: pyFAI.ext.splitPixelFull
:members:
:undoc-members:
:show-inheritance:
pyFAI.ext.splitPixelFullCSR module
----------------------------------

View File

@@ -49,14 +49,6 @@ pyFAI.gui.utils.projecturl module
:undoc-members:
:show-inheritance:
pyFAI.gui.utils.setup module
----------------------------
.. automodule:: pyFAI.gui.utils.setup
:members:
:undoc-members:
:show-inheritance:
pyFAI.gui.utils.timeutils module
--------------------------------

View File

@@ -73,14 +73,6 @@ pyFAI.gui.widgets.DetectorSelector module
:undoc-members:
:show-inheritance:
pyFAI.gui.widgets.ElidedLabel module
------------------------------------
.. automodule:: pyFAI.gui.widgets.ElidedLabel
:members:
:undoc-members:
:show-inheritance:
pyFAI.gui.widgets.FileEdit module
---------------------------------

View File

@@ -10,7 +10,7 @@ expect after having launched *Jupyter notebook* (or ipython) and typed:
The most important class is AzimuthalIntegrator, an object which contains
the geometry (it inherits from Geometry, another class)
and exposes important methods (functions) like `integrate1d` and `integrate2d.
and exposes important methods (functions) like `integrate1d` and `integrate2d`.
.. toctree::
:maxdepth: 2
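
To make the sentence above concrete, here is a minimal sketch of the API it refers to; the PONI and image file names are placeholders, not files from this repository.

```python
# Minimal sketch (not part of this commit) of the AzimuthalIntegrator API
# described above. "calibration.poni" and "image.edf" are placeholder files.
import fabio
import pyFAI

ai = pyFAI.load("calibration.poni")     # AzimuthalIntegrator with geometry from a PONI file
img = fabio.open("image.edf").data      # 2D detector image as a numpy array

# 1D azimuthal integration: radial positions + intensities
q, intensity = ai.integrate1d(img, npt=1000, unit="q_nm^-1")

# 2D regrouping: intensity map + radial and azimuthal axes
intensity2d, radial, azimuthal = ai.integrate2d(img, npt_rad=500, npt_azim=360, unit="2th_deg")
```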

View File

@@ -49,14 +49,6 @@ pyFAI.opencl.preproc module
:undoc-members:
:show-inheritance:
pyFAI.opencl.setup module
-------------------------
.. automodule:: pyFAI.opencl.setup
:members:
:undoc-members:
:show-inheritance:
pyFAI.opencl.sort module
------------------------

View File

@@ -1,15 +1,14 @@
pyFAI.resources package
=======================
pyFAI.resources.setup module
pyFAI.resources.__init__ module
-------------------------------
.. automodule:: pyFAI.resources.setup
.. automodule:: pyFAI.resources.__init__
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------

View File

@@ -57,14 +57,6 @@ pyFAI.utils.orderedset module
:undoc-members:
:show-inheritance:
pyFAI.utils.setup module
------------------------
.. automodule:: pyFAI.utils.setup
:members:
:undoc-members:
:show-inheritance:
pyFAI.utils.shell module
------------------------

View File

@@ -122,6 +122,6 @@ Bugs:
.....
Bugs: Many, see hereafter: 1)If the number of files is too large, use
double quotes "*.edf" 2)There is a known bug on Debian7 where importing
double quotes "\*.edf" 2)There is a known bug on Debian7 where importing
a large number of files can take much longer than the integration itself:
consider passing files in the command line
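
The backslash added above is the actual point of this commit: in a reStructuredText docstring a bare `*` or `|` opens inline markup, and Sphinx warns (e.g. "Inline emphasis start-string without end-string") when it is never closed. A small, self-contained illustration, not code from pyFAI:

```python
# Why the escapes are added: Sphinx parses docstrings as reStructuredText,
# where unmatched '*' or '|' characters trigger warnings.

def bad():
    """Use double quotes "*.edf" and keep pixels with |I - <I>| < thres * std(I)."""
    # '*' and '|' above are interpreted as markup -> Sphinx warnings

def good():
    r"""Use double quotes "\*.edf" and keep pixels with \|I - <I>\| < thres * std(I)."""
    # raw docstring so the backslashes reach Sphinx unchanged; the characters
    # are now rendered literally and the warnings disappear
```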

File diff suppressed because one or more lines are too long

View File

@@ -30,7 +30,7 @@ __author__ = "Jérôme Kieffer"
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "04/10/2022"
__date__ = "05/01/2023"
__status__ = "stable"
__docformat__ = 'restructuredtext'
@@ -3074,7 +3074,7 @@ class AzimuthalIntegrator(Geometry):
Keep only pixels with intensity:
|I - <I>| < thres * std(I)
\|I - <I>\| < thres * std(I)
This enforces a Gaussian distribution and is very good at extracting
background or amorphous isotropic scattering out of Bragg peaks.
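
As a rough illustration of the criterion documented above (plain numpy, not the pyFAI implementation):

```python
# Hedged sketch: keep pixels whose deviation from the mean is below thres * std.
import numpy as np

def sigma_clip_mask(intensity, thres=3.0):
    """Return a boolean mask of pixels with |I - <I>| < thres * std(I)."""
    avg = intensity.mean()
    std = intensity.std()
    return np.abs(intensity - avg) < thres * std

img = np.random.poisson(100, size=(256, 256)).astype(float)
img[100, 100] = 1e6                      # a "Bragg peak" outlier
mask = sigma_clip_mask(img, thres=4.0)   # True for background-like pixels
```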

View File

@@ -36,7 +36,7 @@ __author__ = "Jerome Kieffer"
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "25/06/2020"
__date__ = "05/01/2023"
__status__ = "production"
@@ -116,10 +116,10 @@ class CylindricalDetector(Detector):
:param correct_binning: If True, check that the produced array has the right shape regarding binning
:param use_cython: set to False for testing
:return: 4D array containing:
pixel index (slow dimension)
pixel index (fast dimension)
corner index (A, B, C or D), triangles or hexagons can be handled the same way
vertex position (z,y,x)
pixel index (slow dimension)
pixel index (fast dimension)
corner index (A, B, C or D), triangles or hexagons can be handled the same way
vertex position (z,y,x)
"""
if self._pixel_corners is None:
with self._sem:
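
For readers unfamiliar with this layout, the sketch below shows the expected shape of the returned array; the detector class used here is just an example and is not part of this diff.

```python
# Illustration (an assumption, not taken from this file): the documented layout
# corresponds to an array of shape (slow, fast, 4, 3): for every pixel, 4
# corners (A, B, C, D), each with its (z, y, x) position.
import pyFAI.detectors

det = pyFAI.detectors.Pilatus1M()        # any detector would do
corners = det.get_pixel_corners()
print(corners.shape)                     # (1043, 981, 4, 3) expected for a Pilatus 1M
z, y, x = corners[0, 0, 0]               # first corner of the first pixel
```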

View File

@@ -26,7 +26,7 @@ __author__ = "Jerome Kieffer"
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "13/07/2022"
__date__ = "05/01/2023"
__status__ = "development"
import logging
@@ -175,6 +175,7 @@ class CsrIntegrator1d(CSRIntegrator):
:param unit: the kind of radial units
:param bin_centers: position of the bin centers
:param mask_checksum: just a place-holder to track which mask was used
Nota: the bins value is deduced from the dimensionality of bin_centers
"""
self.bin_centers = bin_centers
@@ -291,9 +292,10 @@ class CsrIntegrator1d(CSRIntegrator):
:param safe: Unused in this implementation
:param error_model: Enum or str, "azimuthal" or "poisson"
:param normalization_factor: divide raw signal by this value
:param cutoff: discard all points with |value - avg| > cutoff * sigma. 3-4 is quite common
:param cutoff: discard all points with \|value - avg\| > cutoff * sigma. 3-4 is quite common
:param cycle: perform at maximum this number of cycles. 5 is common.
:return: namedtuple with "position intensity error signal variance normalization count"
"""
shape = data.shape
error_model = ErrorModel.parse(error_model)
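
A hedged numpy sketch of what the `cutoff` and `cycle` parameters mean, independent of the CSR/OpenCL implementation:

```python
# Repeat at most `cycle` times, each time discarding points with
# |value - avg| > cutoff * sigma, then return the clipped statistics.
import numpy as np

def iterative_sigma_clip(values, cutoff=4.0, cycle=5):
    values = np.asarray(values, dtype=float)
    mask = np.ones(values.shape, dtype=bool)
    for _ in range(cycle):
        avg = values[mask].mean()
        sigma = values[mask].std()
        new_mask = np.abs(values - avg) <= cutoff * sigma
        if np.array_equal(new_mask, mask):   # converged before exhausting cycles
            break
        mask = new_mask
    return values[mask].mean(), values[mask].std()
```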

View File

@@ -33,7 +33,7 @@ __author__ = "Jérôme Kieffer"
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "28/03/2022"
__date__ = "05/01/2023"
__status__ = "production"
import os
@@ -323,7 +323,7 @@ class PeakPicker(object):
return gpt
def onclick_new_grp(self, yx, ring):
" * new_grp Right-click (click+n): try an auto find for a ring"
"new_grp Right-click (click+n): try an auto find for a ring"
# ydata is a float, and matplotlib displays pixels centered.
# we use floor (int cast) instead of round to avoid use of
# banker's rounding
@@ -337,7 +337,7 @@ class PeakPicker(object):
logger.warning("No peak found !!!")
def onclick_single_point(self, yx, ring):
" * Right-click + Ctrl (click+b): create new group with one single point"
"Right-click + Ctrl (click+b): create new group with one single point"
newpeak = self.massif.nearest_peak(yx)
if newpeak:
gpt = self._common_creation([newpeak], ring=ring)
@@ -346,7 +346,7 @@ class PeakPicker(object):
logger.warning("No peak found !!!")
def onclick_append_more_points(self, yx, ring):
" * Right-click + m (click+m): find more points for current group"
"Right-click + m (click+m): find more points for current group"
gpt = self.points.get(ring)
if gpt:
self.widget.remove_grp(gpt.label, update=False)
@@ -364,7 +364,8 @@ class PeakPicker(object):
self.onclick_new_grp(yx, ring)
def onclick_append_1_point(self, yx, ring=None):
""" * Right-click + Shift (click+v): add one point to current group
"""Right-click + Shift (click+v): add one point to current group
:param yx: 2-tuple of coordinates (y, x)
"""
gpt = self.points.get(ring)
@@ -382,7 +383,7 @@ class PeakPicker(object):
self.onclick_new_grp(yx, ring)
def onclick_erase_grp(self, yx, ring):
" * Center-click or (click+d): erase current group"
"Center-click or (click+d): erase current group"
gpt = self.points.pop(ring)
if gpt:
self.widget.remove_grp(gpt.label, update=True)
@@ -394,7 +395,7 @@ class PeakPicker(object):
logger.warning("No group of points for ring %s", ring)
def onclick_erase_1_point(self, yx, ring):
" * Center-click + 1 or (click+1): erase closest point from current group"
"Center-click + 1 or (click+1): erase closest point from current group"
gpt = self.points.get(ring)
if not gpt:
self.widget.remove_grp(gpt.label, update=True)

View File

@@ -42,7 +42,7 @@ __author__ = "Jerome Kieffer"
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "04/10/2022"
__date__ = "05/01/2023"
__status__ = "production"
__docformat__ = 'restructuredtext'
@@ -855,13 +855,14 @@ class FabioWriter(Writer):
"""
def __init__(self, filename=None, extension=None, directory="", prefix=None, index_format="_%04d", start_index=0, fabio_class=None):
"""
"""Constructor of the class
:param filename: name of the output file
:param extension: extension of the output file
:param prefix: basename of the file
:param index_format: "_%04d" gives "_0001" for example
:param start_index: often 0 or 1
:param
:param fabio_class: type of file to write
"""
Writer.__init__(self, filename, extension)
self.header = {}
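
To illustrate the corrected `index_format` documentation, a tiny snippet showing how a `%04d` pattern builds numbered names (the exact naming logic inside `FabioWriter` may differ):

```python
# Hypothetical illustration: building file names from prefix + index_format + extension.
prefix, index_format, extension = "scan", "_%04d", ".edf"

for index in range(2):
    print(prefix + index_format % index + extension)
# scan_0000.edf
# scan_0001.edf

# Note: "_%04s" (the old docstring text) would pad with spaces, not zeros:
print("_%04s" % 1)    # '_   1'
```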

View File

@@ -28,7 +28,7 @@
__authors__ = ["Jérôme Kieffer", "Giannis Ashiotis"]
__license__ = "MIT"
__date__ = "17/11/2022"
__date__ = "05/01/2023"
__copyright__ = "2014-2021, ESRF, Grenoble"
__contact__ = "jerome.kieffer@esrf.fr"
@@ -861,7 +861,7 @@ class OCL_CSR_Integrator(OpenclProcessing):
:param preprocess_only: return the dark subtracted; flat field & solidangle & polarization corrected image, else
:param error_model: enum ErrorModel
:param normalization_factor: divide raw signal by this value
:param cutoff: discard all points with |value - avg| > cutoff * sigma. 3-4 is quite common
:param cutoff: discard all points with ``|value - avg| > cutoff * sigma``. 3-4 is quite common
:param cycle: perform at maximum this number of cycles. 5 is common.
:param out_avgint: destination array or pyopencl array for sum of all data
:param out_sem: destination array or pyopencl array for uncertainty on mean value