Removed some files.

Relintai 2023-01-12 19:44:59 +01:00
parent 5372295ab9
commit 48097818a4
13 changed files with 0 additions and 2398 deletions

@@ -1,30 +0,0 @@
Andreas Haas <liu.gam3@gmail.com>
Andrew Conrad <aconrad103@gmail.com>
Andrii Doroshenko <xrayez@gmail.com>
puchik <puchik@users.noreply.github.com> <48544263+puchik@users.noreply.github.com>
Chris Bradfield <chris@kidscancode.org> <cb@scribe.net>
clayjohn <claynjohn@gmail.com>
clayjohn <claynjohn@gmail.com> <clayjohn@shaw.ca>
corrigentia <coryan25@gmail.com> <20541985+corrigentia@users.noreply.github.com>
Frido <f.schermutzki@gmail.com>
Frido <f.schermutzki@gmail.com> <43795127+mega-bit@users.noreply.github.com>
Hugo Locurcio <hugo.locurcio@hugo.pro> <calinou@opmbx.org>
Hugo Locurcio <hugo.locurcio@hugo.pro> <hugo.l@openmailbox.org>
Ignacio Etcheverry <ignalfonsore@gmail.com> <neikeq@users.noreply.github.com>
Julian Murgia <the.straton@gmail.com>
Kelly Thomas <kelly.thomas@hotmail.com.au>
Leon Krause <lk@leonkrause.com>
Leon Krause <lk@leonkrause.com> <eska@eska.me>
Max Hilbrunner <m.hilbrunner@gmail.com>
Max Hilbrunner <m.hilbrunner@gmail.com> <mhilbrunner@users.noreply.github.com>
Michael Alexsander <michaelalexsander@protonmail.com>
Nathan Lovato <nathan@gdquest.com>
Paul Joannon <hello@pauljoannon.com> <437025+paulloz@users.noreply.github.com>
Rémi Verschelde <rverschelde@gmail.com> <remi@verschelde.fr>
skyace65 <trekie96@hotmail.com>
skyace65 <trekie96@hotmail.com> <trekie96@Hotmail.com>
TwistedTwigleg <beard.noah@gmail.com> <Beard.noah@gmail.com>
Will Nations <willnationsdev@gmail.com>
Yuri Roubinsky <chaosus89@gmail.com>
Yuri Sizov <pycbouh@users.noreply.github.com> <yuris@humnom.net>
ZX-WT <ZX-WT@ymail.com> <ZX_WT@ymail.com>

@@ -1,27 +0,0 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
version: 2
build:
  image: latest
sphinx:
  configuration: conf.py
# Possible options: htmlzip, pdf, epub
# All disabled for now:
# - single-page htmlzip is too big to be usable, and requires too much memory.
# - pdf generates too many latex warnings and the build is killed once logs
# reach 4 MB. Could likely be improved if someone is motivated.
# - epub is too big, and has tons of validation errors which make most readers
# treat it as invalid (GH-3862). Also, it's ugly.
# Hopefully one day we'll have a multi-page HTML zip option, but until
# then, all offline download options are worthless.
# (Track https://github.com/readthedocs/readthedocs.org/issues/3242)
formats: []
python:
  install:
    - requirements: requirements.txt

Makefile

@@ -1,199 +0,0 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
LATEXDEPS = latex dvipng

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD make variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) -t i18n .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo " html to make standalone HTML files"
	@echo " dirhtml to make HTML files named index.html in directories"
	@echo " singlehtml to make a single large HTML file"
	@echo " pickle to make pickle files"
	@echo " json to make JSON files"
	@echo " htmlhelp to make HTML files and a HTML help project"
	@echo " qthelp to make HTML files and a qthelp project"
	@echo " applehelp to make an Apple Help Book"
	@echo " devhelp to make HTML files and a Devhelp project"
	@echo " epub to make an epub"
	@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo " latexpdf to make LaTeX files and run them through pdflatex"
	@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo " text to make text files"
	@echo " man to make manual pages"
	@echo " texinfo to make Texinfo files"
	@echo " info to make Texinfo files and run them through makeinfo"
	@echo " gettext to make PO message catalogs"
	@echo " changes to make an overview of all changed/added/deprecated items"
	@echo " xml to make Docutils-native XML files"
	@echo " pseudoxml to make pseudoxml-XML files for display purposes"
	@echo " linkcheck to check all external links for integrity"
	@echo " doctest to run all doctests embedded in the documentation (if enabled)"
	@echo " coverage to run coverage check of the documentation (if enabled)"
	@echo " dummy to run only the parse steps without generating output"

clean:
	rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/GodotEngine.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/GodotEngine.qhc"

applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/GodotEngine"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/GodotEngine"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) ../sphinx/templates
	@echo
	@echo "Build finished. The message catalogs are in ../sphinx/templates."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."

xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. No output."

@@ -1,371 +0,0 @@
# -*- coding: utf-8 -*-
"""
    pygments.lexers.gdscript
    ~~~~~~~~~~~~~~~~~~~~~~

    Lexer for GDScript.

    :copyright: Copyright 2xxx by The Godot Engine Community
    :license: MIT.

    modified by Daniel J. Ramirez <djrmuv@gmail.com> based on the original python.py pygment
"""

import re

from pygments.lexer import (
    RegexLexer,
    include,
    bygroups,
    default,
    words,
    combined,
)
from pygments.token import (
    Text,
    Comment,
    Operator,
    Keyword,
    Name,
    String,
    Number,
    Punctuation,
)

__all__ = ["GDScriptLexer"]

line_re = re.compile(".*?\n")


class GDScriptLexer(RegexLexer):
    """
    For `GDScript source code <https://www.godotengine.org>`_.
    """

    name = "GDScript"
    aliases = ["gdscript", "gd"]
    filenames = ["*.gd"]
    mimetypes = ["text/x-gdscript", "application/x-gdscript"]

    def innerstring_rules(ttype):
        return [
            # the old style '%s' % (...) string formatting
            (
                r"%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?"
                "[hlL]?[E-GXc-giorsux%]",
                String.Interpol,
            ),
            # backslashes, quotes and formatting signs must be parsed one at a time
            (r'[^\\\'"%\n]+', ttype),
            (r'[\'"\\]', ttype),
            # unhandled string formatting sign
            (r"%", ttype),
            # newlines are an error (use "nl" state)
        ]

    tokens = {
        "root": [
            (r"\n", Text),
            (
                r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
                bygroups(Text, String.Affix, String.Doc),
            ),
            (
                r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
                bygroups(Text, String.Affix, String.Doc),
            ),
            (r"[^\S\n]+", Text),
            (r"#.*$", Comment.Single),
            (r"[]{}:(),;[]", Punctuation),
            (r"\\\n", Text),
            (r"\\", Text),
            (r"(in|and|or|not)\b", Operator.Word),
            (
                r"!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]",
                Operator,
            ),
            include("keywords"),
            (r"(func)((?:\s|\\\s)+)", bygroups(Keyword, Text), "funcname"),
            (r"(class)((?:\s|\\\s)+)", bygroups(Keyword, Text), "classname"),
            include("builtins"),
            (
                '([rR]|[uUbB][rR]|[rR][uUbB])(""")',
                bygroups(String.Affix, String.Double),
                "tdqs",
            ),
            (
                "([rR]|[uUbB][rR]|[rR][uUbB])(''')",
                bygroups(String.Affix, String.Single),
                "tsqs",
            ),
            (
                '([rR]|[uUbB][rR]|[rR][uUbB])(")',
                bygroups(String.Affix, String.Double),
                "dqs",
            ),
            (
                "([rR]|[uUbB][rR]|[rR][uUbB])(')",
                bygroups(String.Affix, String.Single),
                "sqs",
            ),
            (
                '([uUbB]?)(""")',
                bygroups(String.Affix, String.Double),
                combined("stringescape", "tdqs"),
            ),
            (
                "([uUbB]?)(''')",
                bygroups(String.Affix, String.Single),
                combined("stringescape", "tsqs"),
            ),
            (
                '([uUbB]?)(")',
                bygroups(String.Affix, String.Double),
                combined("stringescape", "dqs"),
            ),
            (
                "([uUbB]?)(')",
                bygroups(String.Affix, String.Single),
                combined("stringescape", "sqs"),
            ),
            include("name"),
            include("numbers"),
        ],
        "keywords": [
            (
                words(
                    (
                        "and",
                        "in",
                        "not",
                        "or",
                        "as",
                        "breakpoint",
                        "class",
                        "class_name",
                        "extends",
                        "is",
                        "func",
                        "setget",
                        "signal",
                        "tool",
                        "const",
                        "enum",
                        "export",
                        "onready",
                        "static",
                        "var",
                        "break",
                        "continue",
                        "if",
                        "elif",
                        "else",
                        "for",
                        "pass",
                        "return",
                        "match",
                        "while",
                        "remote",
                        "master",
                        "puppet",
                        "remotesync",
                        "mastersync",
                        "puppetsync",
                    ),
                    suffix=r"\b",
                ),
                Keyword,
            ),
        ],
        "builtins": [
            (
                words(
                    (
                        "Color8",
                        "ColorN",
                        "abs",
                        "acos",
                        "asin",
                        "assert",
                        "atan",
                        "atan2",
                        "bytes2var",
                        "cartesian2polar",
                        "ceil",
                        "char",
                        "clamp",
                        "convert",
                        "cos",
                        "cosh",
                        "db2linear",
                        "decimals",
                        "dectime",
                        "deep_equal",
                        "deg2rad",
                        "dict2inst",
                        "ease",
                        "exp",
                        "floor",
                        "fmod",
                        "fposmod",
                        "funcref",
                        "get_stack",
                        "hash",
                        "inst2dict",
                        "instance_from_id",
                        "inverse_lerp",
                        "is_equal_approx",
                        "is_inf",
                        "is_instance_valid",
                        "is_nan",
                        "is_zero_approx",
                        "len",
                        "lerp",
                        "lerp_angle",
                        "linear2db",
                        "load",
                        "log",
                        "max",
                        "min",
                        "move_toward",
                        "nearest_po2",
                        "ord",
                        "parse_json",
                        "polar2cartesian",
                        "posmod",
                        "pow",
                        "preload",
                        "print",
                        "print_debug",
                        "print_stack",
                        "printerr",
                        "printraw",
                        "prints",
                        "printt",
                        "push_error",
                        "push_warning",
                        "rad2deg",
                        "rand_range",
                        "rand_seed",
                        "randf",
                        "randi",
                        "randomize",
                        "range",
                        "range_lerp",
                        "round",
                        "seed",
                        "sign",
                        "sin",
                        "sinh",
                        "smoothstep",
                        "sqrt",
                        "step_decimals",
                        "stepify",
                        "str",
                        "str2var",
                        "tan",
                        "tanh",
                        "to_json",
                        "type_exists",
                        "typeof",
                        "validate_json",
                        "var2bytes",
                        "var2str",
                        "weakref",
                        "wrapf",
                        "wrapi",
                        "yield",
                    ),
                    prefix=r"(?<!\.)",
                    suffix=r"\b",
                ),
                Name.Builtin,
            ),
            (r"((?<!\.)(self|false|true)|(PI|TAU|NAN|INF)" r")\b", Name.Builtin.Pseudo),
            (
                words(
                    (
                        "bool",
                        "int",
                        "float",
                        "String",
                        "NodePath",
                        "Vector2",
                        "Rect2",
                        "Transform2D",
                        "Vector3",
                        "Rect3",
                        "Plane",
                        "Quat",
                        "Basis",
                        "Transform",
                        "Color",
                        "RID",
                        "Object",
                        "NodePath",
                        "Dictionary",
                        "Array",
                        "PoolByteArray",
                        "PoolIntArray",
                        "PoolRealArray",
                        "PoolStringArray",
                        "PoolVector2Array",
                        "PoolVector3Array",
                        "PoolColorArray",
                        "null",
                    ),
                    prefix=r"(?<!\.)",
                    suffix=r"\b",
                ),
                Name.Builtin.Type,
            ),
        ],
        "numbers": [
            (r"(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?", Number.Float),
            (r"\d+[eE][+-]?[0-9]+j?", Number.Float),
            (r"0x[a-fA-F0-9]+", Number.Hex),
            (r"0b[01]+", Number.Bin),
            (r"\d+j?", Number.Integer),
        ],
        "name": [(r"[a-zA-Z_]\w*", Name)],
        "funcname": [(r"[a-zA-Z_]\w*", Name.Function, "#pop"), default("#pop")],
        "classname": [(r"[a-zA-Z_]\w*", Name.Class, "#pop")],
        "stringescape": [
            (
                r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
                r"U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})",
                String.Escape,
            )
        ],
        "strings-single": innerstring_rules(String.Single),
        "strings-double": innerstring_rules(String.Double),
        "dqs": [
            (r'"', String.Double, "#pop"),
            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
            include("strings-double"),
        ],
        "sqs": [
            (r"'", String.Single, "#pop"),
            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
            include("strings-single"),
        ],
        "tdqs": [
            (r'"""', String.Double, "#pop"),
            include("strings-double"),
            (r"\n", String.Double),
        ],
        "tsqs": [
            (r"'''", String.Single, "#pop"),
            include("strings-single"),
            (r"\n", String.Single),
        ],
    }


def setup(sphinx):
    sphinx.add_lexer("gdscript", GDScriptLexer)

    return {
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }
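
Since the removed lexer is an ordinary Pygments RegexLexer, it can also be exercised outside of Sphinx. The following is a minimal sketch, assuming the file is importable as gdscript (as conf.py further below does) and using an invented GDScript snippet:

# Minimal sketch: tokenize a small GDScript snippet with the removed lexer.
# Assumes gdscript.py is on sys.path (e.g. the _extensions folder).
from gdscript import GDScriptLexer

lexer = GDScriptLexer()
sample = 'func _ready():\n\tprint("hello")\n'  # invented example input
for token_type, value in lexer.get_tokens(sample):
    print(token_type, repr(value))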

@@ -1,120 +0,0 @@
# -*- coding: utf-8 -*-
"""
    godot_descriptions
    ~~~~~~~~~~~~~~~~~~

    Sphinx extension to automatically generate HTML meta description tags
    for all pages. Also comes with some special support for Godot class docs.

    :copyright: Copyright 2021 by The Godot Engine Community
    :license: MIT.

    based on the work of Takayuki Shimizukawa on OpenGraph support for Sphinx,
    see: https://github.com/sphinx-contrib/ogp
"""

import re

from docutils import nodes
from sphinx import addnodes


class DescriptionGenerator:
    def __init__(self, document, pagename="", n_sections_max=3, max_length=220):
        self.document = document
        self.text_list = []
        self.max_length = max_length
        self.current_length = 0
        self.n_sections = 0
        self.n_sections_max = n_sections_max
        self.pagename = pagename
        self.is_class = pagename.startswith("classes/")
        self.stop_word_reached = False

    def dispatch_visit(self, node):
        if (
            self.stop_word_reached
            or self.current_length > self.max_length
            or self.n_sections > self.n_sections_max
        ):
            return

        if isinstance(node, addnodes.compact_paragraph) and node.get("toctree"):
            raise nodes.SkipChildren

        add = True

        if isinstance(node, nodes.paragraph):
            text = node.astext()

            if self.is_class:
                # Skip OOP hierarchy info for description
                if (
                    text.startswith("Inherits:")
                    or text.startswith("Inherited By:")
                    or text.strip() == "Example:"
                ):
                    add = False

                # If we're in a class doc and reached the first table,
                # stop adding to the description
                if text.strip() == "Properties":
                    self.stop_word_reached = True
                    add = False

            if add:
                self.text_list.append(text)
                self.current_length = self.current_length + len(text)

        if add and isinstance(node, nodes.section):
            self.n_sections += 1

    def dispatch_departure(self, node):
        pass

    def format_description(self, desc):
        # Replace newlines with spaces
        desc = re.sub("\r|\n", " ", desc)
        # Replace multiple spaces with single spaces
        desc = re.sub("\\s+", " ", desc)
        # Escape double quotes for HTML
        desc = re.sub('"', "&quot;", desc)
        return desc

    def create_description(self, cutoff_suffix="..."):
        text = " ".join(self.text_list)
        text = self.format_description(text)

        # Cut to self.max_length, add cutoff_suffix at end
        if len(text) > self.max_length:
            text = text[: self.max_length - len(cutoff_suffix)].strip() + cutoff_suffix

        return text


def generate_description(app, pagename, templatename, context, doctree):
    if not doctree:
        return

    generator = DescriptionGenerator(doctree, pagename)
    doctree.walkabout(generator)

    description = (
        '<meta name="description" content="' + generator.create_description() + '">\n'
    )

    context["metatags"] += description


def setup(app):
    # Hook into Sphinx for all pages to
    # generate meta description tag and add to meta tag list
    app.connect("html-page-context", generate_description)

    return {
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }
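
For context, the DescriptionGenerator above is normally driven by doctree.walkabout() via the html-page-context event; the rough sketch below fakes the collected text to show the 220-character truncation, with invented sample strings:

# Illustration only; real usage fills text_list by walking a Sphinx doctree.
from godot_descriptions import DescriptionGenerator  # assumes _extensions is on sys.path

gen = DescriptionGenerator(document=None, pagename="classes/class_node")
gen.text_list = ["Nodes are the building blocks of a Godot scene. " * 10]  # invented text
print(gen.create_description())  # at most 220 characters, ending in "..."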

File diff suppressed because it is too large.

@@ -1,213 +0,0 @@
// Handle page scroll and adjust sidebar accordingly.
// Each page has two scrolls: the main scroll, which is moving the content of the page;
// and the sidebar scroll, which is moving the navigation in the sidebar.
// We want the logo to gradually disappear as the main content is scrolled, giving
// more room to the navigation on the left. This means adjusting the height
// available to the navigation on the fly. There is also a banner below the navigation
// that must be dealt with simultaneously.
const registerOnScrollEvent = (function(){
  // Configuration.

  // The number of pixels the user must scroll by before the logo is completely hidden.
  const scrollTopPixels = 234;
  // The target margin to be applied to the navigation bar when the logo is hidden.
  const menuTopMargin = 90;
  // The max-height offset when the logo is completely visible.
  const menuHeightOffset_default = 338;
  // The max-height offset when the logo is completely hidden.
  const menuHeightOffset_fixed = 102;
  // The distance between the two max-height offset values above; used for intermediate values.
  const menuHeightOffset_diff = (menuHeightOffset_default - menuHeightOffset_fixed);

  // Media query handler.
  return function(mediaQuery) {
    // We only apply this logic to the "desktop" resolution (defined by a media query at the bottom).
    // This handler is executed when the result of the query evaluation changes, which means that
    // the page has moved between "desktop" and "mobile" states.
    // When entering the "desktop" state, we register scroll events and adjust elements on the page.
    // When entering the "mobile" state, we clean up any registered events and restore elements on the page
    // to their initial state.
    const $window = $(window);
    const $sidebar = $('.wy-side-scroll');
    const $search = $sidebar.children('.wy-side-nav-search');
    const $menu = $sidebar.children('.wy-menu-vertical');
    const $ethical = $sidebar.children('.ethical-rtd');

    // This padding is needed to correctly adjust the height of the scrollable area in the sidebar.
    // It has to have the same height as the ethical block, if there is one.
    let $menuPadding = $menu.children('.wy-menu-ethical-padding');
    if ($menuPadding.length == 0) {
      $menuPadding = $('<div class="wy-menu-ethical-padding"></div>');
      $menu.append($menuPadding);
    }

    if (mediaQuery.matches) {
      // Entering the "desktop" state.

      // The main scroll event handler.
      // Executed as the page is scrolled and once immediatelly as the page enters this state.
      const handleMainScroll = (currentScroll) => {
        if (currentScroll >= scrollTopPixels) {
          // After the page is scrolled below the threshold, we fix everything in place.
          $search.css('margin-top', `-${scrollTopPixels}px`);
          $menu.css('margin-top', `${menuTopMargin}px`);
          $menu.css('max-height', `calc(100% - ${menuHeightOffset_fixed}px)`);
        }
        else {
          // Between the top of the page and the threshold we calculate intermediate values
          // to guarantee a smooth transition.
          $search.css('margin-top', `-${currentScroll}px`);
          $menu.css('margin-top', `${menuTopMargin + (scrollTopPixels - currentScroll)}px`);

          if (currentScroll > 0) {
            const scrolledPercent = (scrollTopPixels - currentScroll) / scrollTopPixels;
            const offsetValue = menuHeightOffset_fixed + menuHeightOffset_diff * scrolledPercent;
            $menu.css('max-height', `calc(100% - ${offsetValue}px)`);
          } else {
            $menu.css('max-height', `calc(100% - ${menuHeightOffset_default}px)`);
          }
        }
      };

      // The sidebar scroll event handler.
      // Executed as the sidebar is scrolled as well as after the main scroll. This is needed
      // because the main scroll can affect the scrollable area of the sidebar.
      const handleSidebarScroll = () => {
        const menuElement = $menu.get(0);
        const menuScrollTop = $menu.scrollTop();
        const menuScrollBottom = menuElement.scrollHeight - (menuScrollTop + menuElement.offsetHeight);

        // As the navigation is scrolled we add a shadow to the top bar hanging over it.
        if (menuScrollTop > 0) {
          $search.addClass('fixed-and-scrolled');
        } else {
          $search.removeClass('fixed-and-scrolled');
        }

        // Near the bottom we start moving the sidebar banner into view.
        if (menuScrollBottom < ethicalOffsetBottom) {
          $ethical.css('display', 'block');
          $ethical.css('margin-top', `-${ethicalOffsetBottom - menuScrollBottom}px`);
        } else {
          $ethical.css('display', 'none');
          $ethical.css('margin-top', '0px');
        }
      };

      $search.addClass('fixed');
      $ethical.addClass('fixed');

      // Adjust the inner height of navigation so that the banner can be overlaid there later.
      const ethicalOffsetBottom = $ethical.height() || 0;
      if (ethicalOffsetBottom) {
        $menuPadding.css('height', `${ethicalOffsetBottom}px`);
      } else {
        $menuPadding.css('height', `0px`);
      }

      $window.scroll(function() {
        handleMainScroll(window.scrollY);
        handleSidebarScroll();
      });

      $menu.scroll(function() {
        handleSidebarScroll();
      });

      handleMainScroll(window.scrollY);
      handleSidebarScroll();
    } else {
      // Entering the "mobile" state.

      $window.unbind('scroll');
      $menu.unbind('scroll');

      $search.removeClass('fixed');
      $ethical.removeClass('fixed');

      $search.css('margin-top', `0px`);
      $menu.css('margin-top', `0px`);
      $menu.css('max-height', 'initial');
      $menuPadding.css('height', `0px`);
      $ethical.css('margin-top', '0px');
      $ethical.css('display', 'block');
    }
  };
})();

// Subscribe to DOM changes in the sidebar container, because there is a
// banner that gets added at a later point, that we might not catch otherwise.
const registerSidebarObserver = (function(){
  return function(callback) {
    const sidebarContainer = document.querySelector('.wy-side-scroll');

    let sidebarEthical = null;
    const registerEthicalObserver = () => {
      if (sidebarEthical) {
        // Do it only once.
        return;
      }

      sidebarEthical = sidebarContainer.querySelector('.ethical-rtd');
      if (!sidebarEthical) {
        // Do it only after we have the element there.
        return;
      }

      // This observer watches over the ethical block in sidebar, and all of its subtree.
      const ethicalObserverConfig = { childList: true, subtree: true };
      const ethicalObserverCallback = (mutationsList, observer) => {
        for (let mutation of mutationsList) {
          if (mutation.type !== 'childList') {
            continue;
          }

          callback();
        }
      };

      const ethicalObserver = new MutationObserver(ethicalObserverCallback);
      ethicalObserver.observe(sidebarEthical, ethicalObserverConfig);
    };
    registerEthicalObserver();

    // This observer watches over direct children of the main sidebar container.
    const observerConfig = { childList: true };
    const observerCallback = (mutationsList, observer) => {
      for (let mutation of mutationsList) {
        if (mutation.type !== 'childList') {
          continue;
        }

        callback();
        registerEthicalObserver();
      }
    };

    const observer = new MutationObserver(observerCallback);
    observer.observe(sidebarContainer, observerConfig);
  };
})();

$(document).ready(() => {
  const mediaQuery = window.matchMedia('only screen and (min-width: 769px)');

  registerOnScrollEvent(mediaQuery);
  mediaQuery.addListener(registerOnScrollEvent);

  registerSidebarObserver(() => {
    registerOnScrollEvent(mediaQuery);
  });

  // Load instant.page to prefetch pages upon hovering. This makes navigation feel
  // snappier. The script is dynamically appended as Read the Docs doesn't have
  // a way to add scripts with a "module" attribute.
  const instantPageScript = document.createElement('script');
  instantPageScript.toggleAttribute('module');
  /*! instant.page v5.1.0 - (C) 2019-2020 Alexandre Dieulot - https://instant.page/license */
instantPageScript.innerText = 'let t,e;const n=new Set,o=document.createElement("link"),i=o.relList&&o.relList.supports&&o.relList.supports("prefetch")&&window.IntersectionObserver&&"isIntersecting"in IntersectionObserverEntry.prototype,s="instantAllowQueryString"in document.body.dataset,a="instantAllowExternalLinks"in document.body.dataset,r="instantWhitelist"in document.body.dataset,c="instantMousedownShortcut"in document.body.dataset,d=1111;let l=65,u=!1,f=!1,m=!1;if("instantIntensity"in document.body.dataset){const t=document.body.dataset.instantIntensity;if("mousedown"==t.substr(0,"mousedown".length))u=!0,"mousedown-only"==t&&(f=!0);else if("viewport"==t.substr(0,"viewport".length))navigator.connection&&(navigator.connection.saveData||navigator.connection.effectiveType&&navigator.connection.effectiveType.includes("2g"))||("viewport"==t?document.documentElement.clientWidth*document.documentElement.clientHeight<45e4&&(m=!0):"viewport-all"==t&&(m=!0));else{const e=parseInt(t);isNaN(e)||(l=e)}}if(i){const n={capture:!0,passive:!0};if(f||document.addEventListener("touchstart",function(t){e=performance.now();const n=t.target.closest("a");if(!h(n))return;v(n.href)},n),u?c||document.addEventListener("mousedown",function(t){const e=t.target.closest("a");if(!h(e))return;v(e.href)},n):document.addEventListener("mouseover",function(n){if(performance.now()-e<d)return;const o=n.target.closest("a");if(!h(o))return;o.addEventListener("mouseout",p,{passive:!0}),t=setTimeout(()=>{v(o.href),t=void 0},l)},n),c&&document.addEventListener("mousedown",function(t){if(performance.now()-e<d)return;const n=t.target.closest("a");if(t.which>1||t.metaKey||t.ctrlKey)return;if(!n)return;n.addEventListener("click",function(t){1337!=t.detail&&t.preventDefault()},{capture:!0,passive:!1,once:!0});const o=new MouseEvent("click",{view:window,bubbles:!0,cancelable:!1,detail:1337});n.dispatchEvent(o)},n),m){let t;(t=window.requestIdleCallback?t=>{requestIdleCallback(t,{timeout:1500})}:t=>{t()})(()=>{const t=new IntersectionObserver(e=>{e.forEach(e=>{if(e.isIntersecting){const n=e.target;t.unobserve(n),v(n.href)}})});document.querySelectorAll("a").forEach(e=>{h(e)&&t.observe(e)})})}}function p(e){e.relatedTarget&&e.target.closest("a")==e.relatedTarget.closest("a")||t&&(clearTimeout(t),t=void 0)}function h(t){if(t&&t.href&&(!r||"instant"in t.dataset)&&(a||t.origin==location.origin||"instant"in t.dataset)&&["http:","https:"].includes(t.protocol)&&("http:"!=t.protocol||"https:"!=location.protocol)&&(s||!t.search||"instant"in t.dataset)&&!(t.hash&&t.pathname+t.search==location.pathname+location.search||"noInstant"in t.dataset))return!0}function v(t){if(n.has(t))return;const e=document.createElement("link");e.rel="prefetch",e.href=t,document.head.appendChild(e),n.add(t)}';
document.head.appendChild(instantPageScript);
});

@@ -1,24 +0,0 @@
{%- extends "sphinx_rtd_theme/breadcrumbs.html" %}

{% block breadcrumbs_aside %}
  {% if not meta or meta.get('github_url') != 'hide' %}
    {{ super() }}
    <style>
      .godot-edit-guidelines {
        font-size: 14px;
        float: right;
        clear: both;
      }

      @media screen and (max-width: 480px) {
        .godot-edit-guidelines {
          display: none;
        }
      }
    </style>
    <a class="godot-edit-guidelines" href="https://docs.godotengine.org/en/latest/community/contributing/index.html#writing-documentation">
      Learn how to contribute!
    </a>
  {% endif %}
{% endblock %}

@@ -1,19 +0,0 @@
{% extends "!layout.html" -%}

{% block linktags -%}
  <meta name="theme-color" content="#3d8fcc">
  {% if godot_inject_language_links -%}
    {% for alternate_lang in godot_docs_supported_languages -%}
      {# Convert to ISO 639-1 format, e.g. zh_CN -> zh-cn -#}
      {% set alternate_lang_href = alternate_lang.lower().replace("_", "-") -%}
      <link rel="alternate" hreflang="{{ alternate_lang_href }}" href="{{ godot_docs_basepath }}{{ alternate_lang }}/{{ godot_canonical_version }}/{{ pagename }}{{ godot_docs_suffix }}" />
    {% endfor -%}
    <link rel="alternate" hreflang="x-default" href="{{ godot_docs_basepath }}{{ godot_default_lang }}/{{ godot_canonical_version }}/{{ pagename }}{{ godot_docs_suffix }}" />
    <link rel="canonical" href="{{ godot_docs_basepath }}{{ lang_attr }}/{{ godot_canonical_version }}/{{ pagename }}{{ godot_docs_suffix }}" />
  {% endif -%}
  {{ super() }}
{% endblock -%}

{% block htmltitle -%}
  <title>{{ godot_title_prefix }}{{ title|striptags|e }}{{ titlesuffix }}</title>
{% endblock -%}

conf.py

@@ -1,290 +0,0 @@
# -*- coding: utf-8 -*-
#
# Godot Engine documentation build configuration file

import sphinx_rtd_theme
import sys
import os

# -- General configuration ------------------------------------------------

needs_sphinx = "1.3"

# Sphinx extension module names and templates location
sys.path.append(os.path.abspath("_extensions"))

extensions = [
    "sphinx_tabs.tabs",
    "notfound.extension",
    "sphinxext.opengraph",
]

# Warning when the Sphinx Tabs extension is used with unknown
# builders (like the dummy builder) - as it doesn't cause errors,
# we can ignore this so we still can treat other warnings as errors.
sphinx_tabs_nowarn = True

# Custom 404 page HTML template.
# https://github.com/readthedocs/sphinx-notfound-page
notfound_context = {
    "title": "Page not found",
    "body": """
<h1>Page not found</h1>
<p>
Sorry, we couldn't find that page. It may have been renamed or removed
in the version of the documentation you're currently browsing.
</p>
<p>
If you're currently browsing the
<em>stable</em> version of the documentation, try browsing the
<a href="/en/latest/"><em>latest</em> version of the documentation</a>.
</p>
<p>
Alternatively, use the
<a href="#" onclick="$('#rtd-search-form [name=\\'q\\']').focus()">Search docs</a>
box on the left or <a href="/">go to the homepage</a>.
</p>
""",
}

# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
on_rtd = os.environ.get("READTHEDOCS", None) == "True"

# Don't add `/en/latest` prefix during local development.
# This makes it easier to test the custom 404 page by loading `/404.html`
# on a local web server.
if not on_rtd:
    notfound_urls_prefix = ''

# Specify the site name for the Open Graph extension.
ogp_site_name = "Godot Engine documentation"

if not os.getenv("SPHINX_NO_GDSCRIPT"):
    extensions.append("gdscript")

if not os.getenv("SPHINX_NO_DESCRIPTIONS"):
    extensions.append("godot_descriptions")

templates_path = ["_templates"]

# You can specify multiple suffix as a list of string: ['.rst', '.md']
source_suffix = ".rst"
source_encoding = "utf-8-sig"

# The master toctree document
master_doc = "index"

# General information about the project
project = "Godot Engine"
copyright = (
    "2014-2022, Juan Linietsky, Ariel Manzur and the Godot community (CC-BY 3.0)"
)
author = "Juan Linietsky, Ariel Manzur and the Godot community"

# Version info for the project, acts as replacement for |version| and |release|
# The short X.Y version
version = os.getenv("READTHEDOCS_VERSION", "3.5")
# The full version, including alpha/beta/rc tags
release = version

# Parse Sphinx tags passed from RTD via environment
env_tags = os.getenv("SPHINX_TAGS")
if env_tags is not None:
    for tag in env_tags.split(","):
        print("Adding Sphinx tag: %s" % tag.strip())
        tags.add(tag.strip())  # noqa: F821

# Language / i18n

supported_languages = {
    "en": "Godot Engine (%s) documentation in English",
    "de": "Godot Engine (%s) Dokumentation auf Deutsch",
    "es": "Documentación de Godot Engine (%s) en español",
    "fr": "Documentation de Godot Engine (%s) en français",
    "fi": "Godot Engine (%s) dokumentaatio suomeksi",
    "it": "Godot Engine (%s) documentazione in italiano",
    "ja": "Godot Engine (%s)の日本語のドキュメント",
    "ko": "Godot Engine (%s) 문서 (한국어)",
    "pl": "Dokumentacja Godot Engine (%s) w języku polskim",
    "pt_BR": "Documentação da Godot Engine (%s) em Português Brasileiro",
    "ru": "Документация Godot Engine (%s) на русском языке",
    "uk": "Документація до Godot Engine (%s) українською мовою",
    "zh_CN": "Godot Engine (%s) 简体中文文档",
    "zh_TW": "Godot Engine (%s) 正體中文 (台灣) 文件",
}

language = os.getenv("READTHEDOCS_LANGUAGE", "en")
if not language in supported_languages.keys():
    print("Unknown language: " + language)
    print("Supported languages: " + ", ".join(supported_languages.keys()))
    print(
        "The configured language is either wrong, or it should be added to supported_languages in conf.py. Falling back to 'en'."
    )
    language = "en"

is_i18n = tags.has("i18n")  # noqa: F821

exclude_patterns = ["_build"]

# fmt: off
# These imports should *not* be moved to the start of the file,
# they depend on the sys.path.append call registering "_extensions".
# GDScript syntax highlighting
from gdscript import GDScriptLexer
from sphinx.highlighting import lexers
lexers["gdscript"] = GDScriptLexer()
# fmt: on

smartquotes = False

# Pygments (syntax highlighting) style to use
pygments_style = "sphinx"
highlight_language = "gdscript"

# -- Options for HTML output ----------------------------------------------

html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
if on_rtd:
    using_rtd_theme = True

# Theme options
html_theme_options = {
    # if we have a html_logo below, this shows /only/ the logo with no title text
    "logo_only": True,
    # Collapse navigation (False makes it tree-like)
    "collapse_navigation": False,
}

html_title = supported_languages[language] % version

# VCS options: https://docs.readthedocs.io/en/latest/vcs.html#github
html_context = {
    "display_github": not is_i18n,  # Integrate GitHub
    "github_user": "godotengine",  # Username
    "github_repo": "godot-docs",  # Repo name
    "github_version": "master",  # Version
    "conf_py_path": "/",  # Path in the checkout to the docs root
    "godot_inject_language_links": True,
    "godot_docs_supported_languages": list(supported_languages.keys()),
    "godot_docs_basepath": "https://docs.godotengine.org/",
    "godot_docs_suffix": ".html",
    "godot_default_lang": "en",
    "godot_canonical_version": "stable",
    # Distinguish local development website from production website.
    # This prevents people from looking for changes on the production website after making local changes :)
    "godot_title_prefix": "" if on_rtd else "(DEV) ",
}

html_logo = "img/docs_logo.png"

# These folders are copied to the documentation's HTML output
html_static_path = ["_static"]
html_extra_path = ["robots.txt"]

# These paths are either relative to html_static_path
# or fully qualified paths (e.g. https://...)
html_css_files = [
    "css/custom.css",
]
html_js_files = [
    "js/custom.js",
]

# Output file base name for HTML help builder
htmlhelp_basename = "GodotEnginedoc"

# -- Options for reStructuredText parser ----------------------------------

# Enable directives that insert the contents of external files
file_insertion_enabled = False

# -- Options for LaTeX output ---------------------------------------------

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "GodotEngine.tex",
        "Godot Engine Documentation",
        "Juan Linietsky, Ariel Manzur and the Godot community",
        "manual",
    ),
]

# -- Options for linkcheck builder ----------------------------------------

# disable checking urls with about.html#this_part_of_page anchors
linkcheck_anchors = False

linkcheck_timeout = 10

# -- I18n settings --------------------------------------------------------

# Godot localization is handled via https://github.com/godotengine/godot-docs-l10n
# where the main docs repo is a submodule. Therefore the translated material is
# actually in the parent folder of this conf.py, hence the "../".
locale_dirs = ["../sphinx/po/"]
gettext_compact = False

# We want to host the localized images in godot-docs-l10n, but Sphinx does not provide
# the necessary feature to do so. `figure_language_filename` has `{root}` and `{path}`,
# but they resolve to (host) absolute paths, so we can't use them as is to access "../".
# However, Python is glorious and lets us redefine Sphinx's internal method that handles
# `figure_language_filename`, so we do our own post-processing to fix the absolute path
# and point to the parallel folder structure in godot-docs-l10n.
# Note: Sphinx's handling of `figure_language_filename` may change in the future, monitor
# https://github.com/sphinx-doc/sphinx/issues/7768 to see what would be relevant for us.
figure_language_filename = "{root}.{language}{ext}"

import sphinx

cwd = os.getcwd()

sphinx_original_get_image_filename_for_language = sphinx.util.i18n.get_image_filename_for_language


def godot_get_image_filename_for_language(filename, env):
    """
    Hack the absolute path returned by Sphinx based on `figure_language_filename`
    to insert our `../images` relative path to godot-docs-l10n's images folder,
    which mirrors the folder structure of the docs repository.

    The returned string should also be absolute so that `os.path.exists` can properly
    resolve it when trying to concatenate with the original doc folder.
    """
    path = sphinx_original_get_image_filename_for_language(filename, env)
    path = os.path.abspath(os.path.join("../images/", os.path.relpath(path, cwd)))
    return path


sphinx.util.i18n.get_image_filename_for_language = godot_get_image_filename_for_language

# Similar story for the localized class reference, it's out of tree and there doesn't
# seem to be an easy way for us to tweak the toctree to take this into account.
# So we're deleting the existing class reference and adding a symlink instead...
if is_i18n and os.path.exists("../classes/" + language):
    import shutil

    if os.path.islink("classes"):  # Previously made symlink.
        os.unlink("classes")
    else:
        shutil.rmtree("classes")

    os.symlink("../classes/" + language, "classes")

# Couldn't find a way to retrieve variables nor do advanced string
# concat from reST, so had to hardcode this in the "epilog" added to
# all pages. This is used in index.rst to display the Weblate badge.
# On English pages, the badge points to the language-neutral engage page.
rst_epilog = """
.. |weblate_widget| image:: https://hosted.weblate.org/widgets/godot-engine/{image_locale}/godot-docs/287x66-white.png
    :alt: Translation status
    :target: https://hosted.weblate.org/engage/godot-engine{target_locale}/?utm_source=widget
    :width: 287
    :height: 66
""".format(
    image_locale="-" if language == "en" else language,
    target_locale="" if language == "en" else "/" + language,
)

@@ -1,36 +0,0 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation on Windows

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=Test

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%

:end
popd

@@ -1,16 +0,0 @@
# Base dependencies
# Sync with readthedocs:
# https://github.com/readthedocs/readthedocs.org/blob/master/requirements/pip.txt
# https://github.com/readthedocs/readthedocs.org/blob/master/requirements/docs.txt
sphinx==4.4.0
sphinx_rtd_theme==1.0.0
# Code tabs extension for GDScript/C#
sphinx-tabs==3.3.1
# Custom 404 error page (more useful than the default)
sphinx-notfound-page==0.8.3
# Adds Open Graph tags in the HTML `<head>` tag
sphinxext-opengraph==0.6.3

@@ -1,3 +0,0 @@
user-agent: *
sitemap: https://docs.godotengine.org/sitemap.xml