
Notebooks on RTD ()

Stephan Hoyer 2019-09-30 11:00:02 -07:00 committed by GitHub
parent 064014b53c
commit 9bd7330e1f
9 changed files with 3968 additions and 3939 deletions

.gitignore

@@ -9,3 +9,4 @@
 build/
 dist/
 .mypy_cache/
+docs/notebooks/.ipynb_checkpoints/

docs/conf.py

@@ -59,6 +59,7 @@ extensions = [
     'sphinx.ext.mathjax',
     'sphinx.ext.napoleon',
     'sphinx.ext.viewcode',
+    'nbsphinx',
 ]
 
 intersphinx_mapping = {
@@ -98,6 +99,81 @@ pygments_style = None
 autosummary_generate = True
 napoleon_use_rtype = False
 
+# -- Options for nbsphinx -----------------------------------------------------
+
+# Execute notebooks before conversion: 'always', 'never', 'auto' (default)
+# TODO(shoyer): switch to executing notebooks as part of the doc build process.
+nbsphinx_execute = 'never'
+
+# Use this kernel instead of the one stored in the notebook metadata:
+# nbsphinx_kernel_name = 'python3'
+
+# List of arguments to be passed to the kernel that executes the notebooks:
+# nbsphinx_execute_arguments = []
+
+# If True, the build process is continued even if an exception occurs:
+# nbsphinx_allow_errors = True
+
+# Controls when a cell will time out (defaults to 30; use -1 for no timeout):
+# nbsphinx_timeout = 60
+
+# Default Pygments lexer for syntax highlighting in code cells:
+# nbsphinx_codecell_lexer = 'ipython3'
+
+# Width of input/output prompts used in CSS:
+# nbsphinx_prompt_width = '8ex'
+
+# If window is narrower than this, input/output prompts are on separate lines:
+# nbsphinx_responsive_width = '700px'
+
+# This is processed by Jinja2 and inserted before each notebook
+nbsphinx_prolog = r"""
+{% set docname = 'docs/' + env.doc2path(env.docname, base=None) %}
+
+.. only:: html
+
+    .. role:: raw-html(raw)
+        :format: html
+
+    .. nbinfo::
+
+        Interactive online version:
+        :raw-html:`<a href="https://colab.research.google.com/github/google/jax/{{ docname }}"><img alt="Open In Colab" src="https://colab.research.google.com/assets/colab-badge.svg" style="vertical-align:text-bottom"></a>`
+
+__ https://github.com/google/jax/blob/
+    {{ env.config.release }}/{{ docname }}
+"""
+
+# This is processed by Jinja2 and inserted after each notebook
+# nbsphinx_epilog = r"""
+# """
+
+# Input prompt for code cells. "%s" is replaced by the execution count.
+# nbsphinx_input_prompt = 'In [%s]:'
+
+# Output prompt for code cells. "%s" is replaced by the execution count.
+# nbsphinx_output_prompt = 'Out[%s]:'
+
+# Specify conversion functions for custom notebook formats:
+# import jupytext
+# nbsphinx_custom_formats = {
+#     '.Rmd': lambda s: jupytext.reads(s, '.Rmd'),
+# }
+
+# Link or path to require.js, set to empty string to disable
+# nbsphinx_requirejs_path = ''
+
+# Options for loading require.js
+# nbsphinx_requirejs_options = {'async': 'async'}
+
+# mathjax_config = {
+#     'TeX': {'equationNumbers': {'autoNumber': 'AMS', 'useLabelIds': True}},
+# }
+
+# Additional files needed for generating LaTeX/PDF output:
+# latex_additional_files = ['references.bib']
+
 # -- Options for HTML output -------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages. See the documentation for
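
Aside: the nbsphinx_prolog above is a Jinja2 template that nbsphinx renders once per notebook, with the Sphinx build environment exposed as the variable env. A minimal sketch of that rendering step, where FakeEnv and the trimmed template are illustrative stand-ins, not part of the commit:

import jinja2

# Trimmed-down version of the prolog template from conf.py above.
prolog = r"""
{% set docname = 'docs/' + env.doc2path(env.docname, base=None) %}
Interactive online version:
https://colab.research.google.com/github/google/jax/{{ docname }}
"""

class FakeEnv:
    """Illustrative stand-in for sphinx.environment.BuildEnvironment."""
    docname = "notebooks/quickstart"

    def doc2path(self, docname, base=None):
        # With base=None, Sphinx returns the path relative to the source dir.
        return docname + ".ipynb"

print(jinja2.Template(prolog).render(env=FakeEnv()))
# Interactive online version:
# https://colab.research.google.com/github/google/jax/docs/notebooks/quickstart.ipynb

Because nbsphinx_execute = 'never', Read the Docs renders the outputs already stored in each notebook rather than re-running them; the TODO above records the intent to execute notebooks as part of the build later.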

docs/index.rst

@@ -7,6 +7,14 @@ JIT to GPU/TPU, and more.
 For an introduction to JAX, start at the
 `JAX GitHub page <https://github.com/google/jax>`_.
 
+.. toctree::
+   :maxdepth: 1
+   :caption: Tutorials
+
+   notebooks/quickstart
+   notebooks/autodiff_cookbook
+   notebooks/Common_Gotchas_in_JAX
+
 .. toctree::
    :maxdepth: 1
    :caption: Notes
@@ -19,7 +27,7 @@ For an introduction to JAX, start at the
 .. toctree::
    :maxdepth: 3
-   :caption: Contents
+   :caption: API documentation
 
    jax

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

docs/notebooks/quickstart.ipynb

@@ -17,38 +17,6 @@
     "accelerator": "GPU"
   },
   "cells": [
-    {
-      "cell_type": "markdown",
-      "metadata": {
-        "id": "logZcM_HEnve",
-        "colab_type": "text"
-      },
-      "source": [
-        "##### Copyright 2018 Google LLC.\n",
-        "\n",
-        "Licensed under the Apache License, Version 2.0 (the \"License\");"
-      ]
-    },
-    {
-      "cell_type": "markdown",
-      "metadata": {
-        "id": "QwN47xiBEsKz",
-        "colab_type": "text"
-      },
-      "source": [
-        "Licensed under the Apache License, Version 2.0 (the \"License\");\n",
-        "you may not use this file except in compliance with the License.\n",
-        "You may obtain a copy of the License at\n",
-        "\n",
-        "https://www.apache.org/licenses/LICENSE-2.0\n",
-        "\n",
-        "Unless required by applicable law or agreed to in writing, software\n",
-        "distributed under the License is distributed on an \"AS IS\" BASIS,\n",
-        "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
-        "See the License for the specific language governing permissions and\n",
-        "limitations under the License."
-      ]
-    },
     {
       "cell_type": "markdown",
       "metadata": {
@@ -58,9 +26,7 @@
       "source": [
         "# JAX Quickstart\n",
         "\n",
-        "![](https://raw.githubusercontent.com/google/jax/master/images/jax_logo_250px.png)\n",
-        "\n",
-        "#### [JAX](https://github.com/google/jax) is NumPy on the CPU, GPU, and TPU, with great automatic differentiation for high-performance machine learning research.\n",
+        "**JAX is NumPy on the CPU, GPU, and TPU, with great automatic differentiation for high-performance machine learning research.**\n",
         "\n",
         "With its updated version of [Autograd](https://github.com/hips/autograd), JAX\n",
         "can automatically differentiate native Python and NumPy code. It can\n",
@@ -88,7 +54,6 @@
         "colab": {}
       },
       "source": [
-        "from __future__ import print_function, division\n",
         "import jax.numpy as np\n",
         "from jax import grad, jit, vmap\n",
         "from jax import random"
@@ -103,7 +68,7 @@
         "id": "FQ89jHCYfhpg"
       },
       "source": [
-        "### Multiplying Matrices"
+        "## Multiplying Matrices"
       ]
     },
     {
@@ -267,7 +232,7 @@
         "id": "bTTrTbWvgLUK"
       },
       "source": [
-        "### Using `jit` to speed up functions"
+        "## Using `jit` to speed up functions"
       ]
     },
     {
@@ -328,7 +293,7 @@
         "id": "HxpBc4WmfsEU"
       },
       "source": [
-        "### Taking derivatives with `grad`\n",
+        "## Taking derivatives with `grad`\n",
         "\n",
         "In addition to evaluating numerical functions, we also want to transform them. One transformation is [automatic differentiation](https://en.wikipedia.org/wiki/Automatic_differentiation). In JAX, just like in [Autograd](https://github.com/HIPS/autograd), you can compute gradients with the `grad` function."
       ]
@@ -435,7 +400,7 @@
         "id": "TI4nPsGafxbL"
       },
       "source": [
-        "### Auto-vectorization with `vmap`"
+        "## Auto-vectorization with `vmap`"
       ]
     },
     {
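
For reference, a small sketch (not from the diff) of the three transformations named by the quickstart's new section headings, written in the notebook's import style of the time; the printed values are approximate:

import jax.numpy as np
from jax import grad, jit, vmap

def tanh(x):
    return (1.0 - np.exp(-2 * x)) / (1.0 + np.exp(-2 * x))

grad_tanh = grad(tanh)    # derivative of tanh via automatic differentiation
fast_tanh = jit(tanh)     # compiled end-to-end with XLA on first call
batch_tanh = vmap(tanh)   # mapped over a leading batch axis

print(grad_tanh(1.0))              # ~0.41997433, i.e. 1 - tanh(1)**2
print(fast_tanh(1.0))              # same value as tanh(1.0), just faster
print(batch_tanh(np.arange(3.0)))  # tanh applied elementwise to [0., 1., 2.]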

docs/requirements.txt

@@ -1 +1,3 @@
-jaxlib
+jaxlib
+ipykernel
+nbsphinx
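
With ipykernel and nbsphinx added to the doc requirements, a local build roughly equivalent to the Read the Docs one can be driven from Python. This is a sketch: it assumes Sphinx >= 1.7 for this entry point, and the output directory docs/_build/html is an arbitrary choice.

from sphinx.cmd.build import build_main

# Build HTML docs from the docs/ source tree. With nbsphinx installed, the
# notebooks listed in the new Tutorials toctree are converted to HTML pages
# instead of being skipped as unknown source files.
exit_code = build_main(["-b", "html", "docs", "docs/_build/html"])
print("sphinx-build exit code:", exit_code)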

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large