diff --git a/_static/alabaster.css b/_static/alabaster.css index 4c500fb..fc325ec 100644 --- a/_static/alabaster.css +++ b/_static/alabaster.css @@ -419,7 +419,9 @@ table.footnote td { } dl { - margin: 0; + margin-left: 0; + margin-right: 0; + margin-top: 0; padding: 0; } diff --git a/_static/doctools.js b/_static/doctools.js index c3db08d..527b876 100644 --- a/_static/doctools.js +++ b/_static/doctools.js @@ -10,6 +10,13 @@ */ "use strict"; +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + const _ready = (callback) => { if (document.readyState !== "loading") { callback(); @@ -18,73 +25,11 @@ const _ready = (callback) => { } }; -/** - * highlight a given string on a node by wrapping it in - * span elements with the given class name. - */ -const _highlight = (node, addItems, text, className) => { - if (node.nodeType === Node.TEXT_NODE) { - const val = node.nodeValue; - const parent = node.parentNode; - const pos = val.toLowerCase().indexOf(text); - if ( - pos >= 0 && - !parent.classList.contains(className) && - !parent.classList.contains("nohighlight") - ) { - let span; - - const closestNode = parent.closest("body, svg, foreignObject"); - const isInSVG = closestNode && closestNode.matches("svg"); - if (isInSVG) { - span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); - } else { - span = document.createElement("span"); - span.classList.add(className); - } - - span.appendChild(document.createTextNode(val.substr(pos, text.length))); - parent.insertBefore( - span, - parent.insertBefore( - document.createTextNode(val.substr(pos + text.length)), - node.nextSibling - ) - ); - node.nodeValue = val.substr(0, pos); - - if (isInSVG) { - const rect = document.createElementNS( - "http://www.w3.org/2000/svg", - "rect" - ); - const bbox = parent.getBBox(); - rect.x.baseVal.value = bbox.x; - rect.y.baseVal.value = bbox.y; - rect.width.baseVal.value = bbox.width; - rect.height.baseVal.value = bbox.height; - rect.setAttribute("class", className); - addItems.push({ parent: parent, target: rect }); - } - } - } else if (node.matches && !node.matches("button, select, textarea")) { - node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); - } -}; -const _highlightText = (thisNode, text, className) => { - let addItems = []; - _highlight(thisNode, addItems, text, className); - addItems.forEach((obj) => - obj.parent.insertAdjacentElement("beforebegin", obj.target) - ); -}; - /** * Small JavaScript module for the documentation. */ const Documentation = { init: () => { - Documentation.highlightSearchWords(); Documentation.initDomainIndexTable(); Documentation.initOnKeyListeners(); }, @@ -126,51 +71,6 @@ const Documentation = { Documentation.LOCALE = catalog.locale; }, - /** - * highlight the search words provided in the url in the text - */ - highlightSearchWords: () => { - const highlight = - new URLSearchParams(window.location.search).get("highlight") || ""; - const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); - if (terms.length === 0) return; // nothing to do - - // There should never be more than one element matching "div.body" - const divBody = document.querySelectorAll("div.body"); - const body = divBody.length ? 
divBody[0] : document.querySelector("body");
-    window.setTimeout(() => {
-      terms.forEach((term) => _highlightText(body, term, "highlighted"));
-    }, 10);
-
-    const searchBox = document.getElementById("searchbox");
-    if (searchBox === null) return;
-    searchBox.appendChild(
-      document
-        .createRange()
-        .createContextualFragment(
-          '<p class="highlight-link">' +
-            '<a href="javascript:Documentation.hideSearchWords()">' +
-            Documentation.gettext("Hide Search Matches") +
-            "</a></p>
" - ) - ); - }, - - /** - * helper function to hide the search marks again - */ - hideSearchWords: () => { - document - .querySelectorAll("#searchbox .highlight-link") - .forEach((el) => el.remove()); - document - .querySelectorAll("span.highlighted") - .forEach((el) => el.classList.remove("highlighted")); - const url = new URL(window.location); - url.searchParams.delete("highlight"); - window.history.replaceState({}, "", url); - }, - /** * helper function to focus on search bar */ @@ -210,15 +110,11 @@ const Documentation = { ) return; - const blacklistedElements = new Set([ - "TEXTAREA", - "INPUT", - "SELECT", - "BUTTON", - ]); document.addEventListener("keydown", (event) => { - if (blacklistedElements.has(document.activeElement.tagName)) return; // bail for input elements - if (event.altKey || event.ctrlKey || event.metaKey) return; // bail with special keys + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; if (!event.shiftKey) { switch (event.key) { @@ -240,10 +136,6 @@ const Documentation = { event.preventDefault(); } break; - case "Escape": - if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; - Documentation.hideSearchWords(); - event.preventDefault(); } } diff --git a/_static/searchtools.js b/_static/searchtools.js index f2fb7d5..e89e34d 100644 --- a/_static/searchtools.js +++ b/_static/searchtools.js @@ -57,14 +57,14 @@ const _removeChildren = (element) => { const _escapeRegExp = (string) => string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string -const _displayItem = (item, highlightTerms, searchTerms) => { +const _displayItem = (item, searchTerms) => { const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; const docUrlRoot = DOCUMENTATION_OPTIONS.URL_ROOT; const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; - const [docName, title, anchor, descr] = item; + const [docName, title, anchor, descr, score, _filename] = item; let listItem = document.createElement("li"); let requestUrl; @@ -82,10 +82,9 @@ const _displayItem = (item, highlightTerms, searchTerms) => { requestUrl = docUrlRoot + docName + docFileSuffix; linkUrl = docName + docLinkSuffix; } - const params = new URLSearchParams(); - params.set("highlight", [...highlightTerms].join(" ")); let linkEl = listItem.appendChild(document.createElement("a")); - linkEl.href = linkUrl + "?" 
+ params.toString() + anchor; + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; linkEl.innerHTML = title; if (descr) listItem.appendChild(document.createElement("span")).innerHTML = @@ -96,7 +95,7 @@ const _displayItem = (item, highlightTerms, searchTerms) => { .then((data) => { if (data) listItem.appendChild( - Search.makeSearchSummary(data, searchTerms, highlightTerms) + Search.makeSearchSummary(data, searchTerms) ); }); Search.output.appendChild(listItem); @@ -116,15 +115,14 @@ const _finishSearch = (resultCount) => { const _displayNextItem = ( results, resultCount, - highlightTerms, searchTerms ) => { // results left, load the summary and display it // this is intended to be dynamic (don't sub resultsCount) if (results.length) { - _displayItem(results.pop(), highlightTerms, searchTerms); + _displayItem(results.pop(), searchTerms); setTimeout( - () => _displayNextItem(results, resultCount, highlightTerms, searchTerms), + () => _displayNextItem(results, resultCount, searchTerms), 5 ); } @@ -237,6 +235,12 @@ const Search = { * execute search (requires search index to be loaded) */ query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + // stem the search terms and add them to the correct list const stemmer = new Stemmer(); const searchTerms = new Set(); @@ -264,6 +268,10 @@ const Search = { } }); + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + // console.debug("SEARCH: searching for:"); // console.info("required: ", [...searchTerms]); // console.info("excluded: ", [...excludedTerms]); @@ -272,6 +280,40 @@ const Search = { let results = []; _removeChildren(document.getElementById("search-progress")); + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? 
"#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + // lookup as object objectTerms.forEach((term) => results.push(...Search.performObjectSearch(term, objectTerms)) @@ -318,7 +360,7 @@ const Search = { // console.info("search results:", Search.lastresults); // print the results - _displayNextItem(results, results.length, highlightTerms, searchTerms); + _displayNextItem(results, results.length, searchTerms); }, /** @@ -399,8 +441,8 @@ const Search = { // prepare search const terms = Search._index.terms; const titleTerms = Search._index.titleterms; - const docNames = Search._index.docnames; const filenames = Search._index.filenames; + const docNames = Search._index.docnames; const titles = Search._index.titles; const scoreMap = new Map(); @@ -497,11 +539,9 @@ const Search = { /** * helper function to return a node containing the * search summary for a given text. keywords is a list - * of stemmed words, highlightWords is the list of normal, unstemmed - * words. the first one is used to find the occurrence, the - * latter for highlighting it. + * of stemmed words. */ - makeSearchSummary: (htmlText, keywords, highlightWords) => { + makeSearchSummary: (htmlText, keywords) => { const text = Search.htmlToText(htmlText); if (text === "") return null; @@ -519,10 +559,6 @@ const Search = { summary.classList.add("context"); summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; - highlightWords.forEach((highlightWord) => - _highlightText(summary, highlightWord, "highlighted") - ); - return summary; }, }; diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js new file mode 100644 index 0000000..aae669d --- /dev/null +++ b/_static/sphinx_highlight.js @@ -0,0 +1,144 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */
+const _highlight = (node, addItems, text, className) => {
+  if (node.nodeType === Node.TEXT_NODE) {
+    const val = node.nodeValue;
+    const parent = node.parentNode;
+    const pos = val.toLowerCase().indexOf(text);
+    if (
+      pos >= 0 &&
+      !parent.classList.contains(className) &&
+      !parent.classList.contains("nohighlight")
+    ) {
+      let span;
+
+      const closestNode = parent.closest("body, svg, foreignObject");
+      const isInSVG = closestNode && closestNode.matches("svg");
+      if (isInSVG) {
+        span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
+      } else {
+        span = document.createElement("span");
+        span.classList.add(className);
+      }
+
+      span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+      parent.insertBefore(
+        span,
+        parent.insertBefore(
+          document.createTextNode(val.substr(pos + text.length)),
+          node.nextSibling
+        )
+      );
+      node.nodeValue = val.substr(0, pos);
+
+      if (isInSVG) {
+        const rect = document.createElementNS(
+          "http://www.w3.org/2000/svg",
+          "rect"
+        );
+        const bbox = parent.getBBox();
+        rect.x.baseVal.value = bbox.x;
+        rect.y.baseVal.value = bbox.y;
+        rect.width.baseVal.value = bbox.width;
+        rect.height.baseVal.value = bbox.height;
+        rect.setAttribute("class", className);
+        addItems.push({ parent: parent, target: rect });
+      }
+    }
+  } else if (node.matches && !node.matches("button, select, textarea")) {
+    node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
+  }
+};
+const _highlightText = (thisNode, text, className) => {
+  let addItems = [];
+  _highlight(thisNode, addItems, text, className);
+  addItems.forEach((obj) =>
+    obj.parent.insertAdjacentElement("beforebegin", obj.target)
+  );
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+const SphinxHighlight = {
+
+  /**
+   * highlight the search words provided in localstorage in the text
+   */
+  highlightSearchWords: () => {
+    if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
+
+    // get and clear terms from localstorage
+    const url = new URL(window.location);
+    const highlight =
+      localStorage.getItem("sphinx_highlight_terms")
+      || url.searchParams.get("highlight")
+      || "";
+    localStorage.removeItem("sphinx_highlight_terms")
+    url.searchParams.delete("highlight");
+    window.history.replaceState({}, "", url);
+
+    // get individual terms from highlight string
+    const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
+    if (terms.length === 0) return; // nothing to do
+
+    // There should never be more than one element matching "div.body"
+    const divBody = document.querySelectorAll("div.body");
+    const body = divBody.length ? divBody[0] : document.querySelector("body");
+    window.setTimeout(() => {
+      terms.forEach((term) => _highlightText(body, term, "highlighted"));
+    }, 10);
+
+    const searchBox = document.getElementById("searchbox");
+    if (searchBox === null) return;
+    searchBox.appendChild(
+      document
+        .createRange()
+        .createContextualFragment(
+          '<p class="highlight-link">' +
+            '<a href="javascript:SphinxHighlight.hideSearchWords()">' +
+            _("Hide Search Matches") +
+            "</a></p>"
" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(SphinxHighlight.highlightSearchWords); +_ready(SphinxHighlight.initEscapeListener); diff --git a/api.html b/api.html index 0b7d1ad..37aa0f1 100644 --- a/api.html +++ b/api.html @@ -14,6 +14,7 @@ + @@ -58,7 +59,22 @@Contents:
[api.html (index 0b7d1ad..37aa0f1), index.html, genindex.html, and py-modindex.html: regenerated markup only; the visible documentation text is unchanged, so every -/+ pair in these hunks differs solely in HTML. The recoverable changes are:
  * each page's <head> gains <script src="_static/sphinx_highlight.js"></script>;
  * cross-reference links (torch.einsum(), logsumexp(), numpy.einsum_path(), and typing classes such as Union, Optional, Literal, Callable) drop the old "?highlight=..." query string and point directly at their anchors;
  * inline literals such as Literal['uniform'] are rendered in code style rather than with curly quotes;
  * the sidebar "Contents:" navigation on api.html is rebuilt (hunk @@ -58,7 +59,22 @@);
  * each footer changes from "Powered by Sphinx 5.1.1 & Alabaster 0.7.12" to "Powered by Sphinx 5.3.0 & Alabaster 0.7.13".]
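[Note: although the regenerated pages no longer emit "?highlight=..." links, sphinx_highlight.js above still honors the old-style parameter as a fallback and then scrubs it from the address bar. A sketch of that read-then-strip step, with an illustrative URL:]

// An old-style URL still works once; the parameter is then removed in place.
const url = new URL("https://example.org/api.html?highlight=einsum%20block");
const legacy = url.searchParams.get("highlight") || "";
url.searchParams.delete("highlight");
console.log(legacy);         // "einsum block"
console.log(url.toString()); // "https://example.org/api.html"
// The real script then calls: window.history.replaceState({}, "", url);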
diff --git a/search.html b/search.html
index 5e661ae..6af46f4 100644
--- a/search.html
+++ b/search.html
@@ -14,6 +14,7 @@
+    <script src="_static/sphinx_highlight.js"></script>
@@ -128,8 +129,8 @@ Search
©2019-2022, Brian DuSell.
|
- Powered by Sphinx 5.1.1
- & Alabaster 0.7.12
+ Powered by Sphinx 5.3.0
+ & Alabaster 0.7.13
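[Note: search.html, like every page in this diff, now loads sphinx_highlight.js, which installs the Escape handler removed from doctools.js above. Both scripts share the same keydown guard; a self-contained sketch of it, with console.log standing in for SphinxHighlight.hideSearchWords():]

const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set(["TEXTAREA", "INPUT", "SELECT", "BUTTON"]);

document.addEventListener("keydown", (event) => {
  // Bail while a form control has focus, and for browser-level key chords.
  if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
  if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
  if (event.key === "Escape") {
    console.log("would call SphinxHighlight.hideSearchWords()");
    event.preventDefault();
  }
});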
diff --git a/searchindex.js b/searchindex.js
index d0a30ec..da48d4b 100644
--- a/searchindex.js
+++ b/searchindex.js
@@ -1 +1 @@
[searchindex.js is regenerated wholesale as a single minified Search.setIndex(...) line (no trailing newline on either side). Relative to the old index, the new one: adds the "alltitles" and "indexentries" maps consumed by the new title and index-entry lookups in searchtools.js; records stemmed entries such as combine, real_einsum_forward, real_einsum_backward, log_einsum_forward, log_einsum_backward, log_viterbi_einsum_forward, semiring_einsum_forward, and AUTOMATIC_BLOCK_SIZE as occurring on both documented pages; and bumps "envversion" ("sphinx" 56 -> 57, "sphinx.domains.cpp" 6 -> 8).]
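[Note: the "alltitles" map added above feeds the new partial-title matching in the searchtools.js hunk earlier in this diff: a query matches a title only if the title contains it and the query covers at least half of the title's length, and the score is the percentage covered. A standalone sketch with illustrative inputs:]

const titleScore = (query, title) => {
  const q = query.toLowerCase();
  if (!title.toLowerCase().includes(q) || q.length < title.length / 2) return null;
  return Math.round((100 * q.length) / title.length); // 100 means the query spans the whole title
};

console.log(titleScore("einsum syntax", "Einsum Syntax")); // 100
console.log(titleScore("einsum synt", "Einsum Syntax"));   // 85
console.log(titleScore("einsum", "Einsum Syntax"));        // null: covers less than half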