Update for Vulkan-Docs 1.3.241

parent e8b8e06d09
commit bd6443d28f

47 changed files with 3638 additions and 968 deletions

@@ -1,6 +1,6 @@
 #!/usr/bin/python3 -i
 #
-# Copyright 2021-2022 The Khronos Group Inc.
+# Copyright 2021-2023 The Khronos Group Inc.
 # SPDX-License-Identifier: Apache-2.0
 
 # Generic alias for working group-specific API conventions interface.

@@ -1,6 +1,6 @@
 #!/usr/bin/python3 -i
 #
-# Copyright 2013-2022 The Khronos Group Inc.
+# Copyright 2013-2023 The Khronos Group Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 

@@ -346,6 +346,8 @@ class COutputGenerator(OutputGenerator):
                     body += self.genOpts.apientry + noneStr(elem.tail)
                 else:
                     body += noneStr(elem.text) + noneStr(elem.tail)
+            if category == 'define' and self.misracppstyle():
+                body = body.replace("(uint32_t)", "static_cast<uint32_t>")
             if body:
                 # Add extra newline after multi-line entries.
                 if '\n' in body[0:-1]:
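
The new misracppstyle() branch only rewrites generated 'define' bodies. A small illustration of the substitution, using a hypothetical macro body rather than actual generator output:

# Hypothetical macro body (illustration only, not actual generator output)
body = '#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22U) & 0x7FU)'

# Same textual substitution as the MISRA C++ branch above
print(body.replace("(uint32_t)", "static_cast<uint32_t>"))
# #define VK_API_VERSION_MAJOR(version) ((static_cast<uint32_t>(version) >> 22U) & 0x7FU)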

@@ -1,6 +1,6 @@
 #!/usr/bin/python3 -i
 #
-# Copyright 2013-2022 The Khronos Group Inc.
+# Copyright 2013-2023 The Khronos Group Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 """Base class for source/header/doc generators, as well as some utility functions."""

@@ -1,6 +1,6 @@
 #!/usr/bin/python3
 #
-# Copyright 2013-2022 The Khronos Group Inc.
+# Copyright 2013-2023 The Khronos Group Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 

@@ -9,6 +9,7 @@ import os
 import pdb
 import re
 import sys
 import copy
 import time
 import xml.etree.ElementTree as etree

@@ -119,7 +120,7 @@ def makeGenOpts(args):
     # The SPDX formatting below works around constraints of the 'reuse' tool
     prefixStrings = [
         '/*',
-        '** Copyright 2015-2022 The Khronos Group Inc.',
+        '** Copyright 2015-2023 The Khronos Group Inc.',
         '**',
         '** SPDX-License-Identifier' + ': Apache-2.0',
         '*/',

@@ -141,7 +142,10 @@ def makeGenOpts(args):
     # An API style conventions object
     conventions = APIConventions()
 
-    defaultAPIName = conventions.xml_api_name
+    if args.apiname is not None:
+        defaultAPIName = args.apiname
+    else:
+        defaultAPIName = conventions.xml_api_name
 
     # API include files for spec and ref pages
     # Overwrites include subdirectories in spec source tree

@@ -699,6 +703,9 @@ def genTarget(args):
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
 
+    parser.add_argument('-apiname', action='store',
+                        default=None,
+                        help='Specify API to generate (defaults to repository-specific conventions object value)')
     parser.add_argument('-defaultExtensions', action='store',
                         default=APIConventions().xml_api_name,
                         help='Specify a single class of extensions to add to targets')
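
A minimal sketch of the precedence the new option introduces: an explicit -apiname value wins, otherwise the conventions object's xml_api_name is used. The _Conventions class below is a hypothetical stand-in for APIConventions:

import argparse

class _Conventions:
    # hypothetical stand-in for APIConventions
    xml_api_name = 'vulkan'

parser = argparse.ArgumentParser()
parser.add_argument('-apiname', action='store', default=None)

conventions = _Conventions()
for argv in ([], ['-apiname', 'vulkansc']):
    args = parser.parse_args(argv)
    if args.apiname is not None:
        defaultAPIName = args.apiname
    else:
        defaultAPIName = conventions.xml_api_name
    print(argv, '->', defaultAPIName)
# [] -> vulkan
# ['-apiname', 'vulkansc'] -> vulkansc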

registry/parse_dependency.py (new executable file, 315 lines)

@@ -0,0 +1,315 @@
#!/usr/bin/python3

# Copyright 2022-2023 The Khronos Group Inc.
# Copyright 2003-2019 Paul McGuire
# SPDX-License-Identifier: MIT

# apirequirements.py - parse 'depends' expressions in API XML
# Supported methods:
#   dependency - the expression string
#
#   evaluateDependency(dependency, isSupported) evaluates the expression,
#   returning a boolean result. isSupported takes an extension or version name
#   string and returns a boolean.
#
#   dependencyLanguage(dependency) returns an English string equivalent
#   to the expression, suitable for header file comments.
#
#   dependencyNames(dependency) returns a set of the extension and
#   version names in the expression.
#
#   dependencyMarkup(dependency) returns a string containing asciidoctor
#   markup for English equivalent to the expression, suitable for extension
#   appendices.
#
# All may throw a ParseException if the expression cannot be parsed or is
# not completely consumed by parsing.

# Supported expressions at present:
#   - extension names
#   - '+' as AND connector
#   - ',' as OR connector
#   - parenthesization for grouping

# Based on https://github.com/pyparsing/pyparsing/blob/master/examples/fourFn.py

from pyparsing import (
    Literal,
    Word,
    Group,
    Forward,
    alphas,
    alphanums,
    Regex,
    ParseException,
    CaselessKeyword,
    Suppress,
    delimitedList,
    infixNotation,
)

import math
import operator
import pyparsing as pp
import re

def nameMarkup(name):
    """Returns asciidoc markup to generate a link to an API version or
       extension anchor.

       - name - version or extension name"""

    # Could use ApiConventions.is_api_version_name, but that does not split
    # out the major/minor version numbers.
    match = re.search("[A-Z]+_VERSION_([0-9]+)_([0-9]+)", name)
    if match is not None:
        major = match.group(1)
        minor = match.group(2)
        version = major + '.' + minor
        return f'<<versions-{major}.{minor}, Version {version}>>'
    else:
        return 'apiext:' + name

exprStack = []

def push_first(toks):
    """Push a token on the global stack

       - toks - first element is the token to push"""

    exprStack.append(toks[0])

# An identifier (version or extension name)
dependencyIdent = Word(alphanums + '_')

# Infix expression for depends expressions
dependencyExpr = pp.infixNotation(dependencyIdent,
    [ (pp.oneOf(', +'), 2, pp.opAssoc.LEFT), ])

# BNF grammar for depends expressions
_bnf = None
def dependencyBNF():
    """
    boolop  :: '+' | ','
    extname :: Char(alphas)
    atom    :: extname | '(' expr ')'
    expr    :: atom [ boolop atom ]*
    """
    global _bnf
    if _bnf is None:
        and_, or_ = map(Literal, '+,')
        lpar, rpar = map(Suppress, '()')
        boolop = and_ | or_

        expr = Forward()
        expr_list = delimitedList(Group(expr))
        atom = (
            boolop[...]
            + (
                (dependencyIdent).setParseAction(push_first)
                | Group(lpar + expr + rpar)
            )
        )

        expr <<= atom + (boolop + atom).setParseAction(push_first)[...]
        _bnf = expr
    return _bnf


# map operator symbols to corresponding arithmetic operations
_opn = {
    '+': operator.and_,
    ',': operator.or_,
}

# map operator symbols to corresponding words
_opname = {
    '+': 'and',
    ',': 'or',
}

def evaluateStack(stack, isSupported):
    """Evaluate an expression stack, returning a boolean result.

       - stack - the stack
       - isSupported - function taking a version or extension name string and
         returning True or False if that name is supported or not."""

    op, num_args = stack.pop(), 0
    if isinstance(op, tuple):
        op, num_args = op

    if op in '+,':
        # Note: operands are pushed onto the stack in reverse order
        op2 = evaluateStack(stack, isSupported)
        op1 = evaluateStack(stack, isSupported)
        return _opn[op](op1, op2)
    elif op[0].isalpha():
        return isSupported(op)
    else:
        raise Exception(f'invalid op: {op}')

def evaluateDependency(dependency, isSupported):
    """Evaluate a dependency expression, returning a boolean result.

       - dependency - the expression
       - isSupported - function taking a version or extension name string and
         returning True or False if that name is supported or not."""

    global exprStack
    exprStack = []
    results = dependencyBNF().parseString(dependency, parseAll=True)
    val = evaluateStack(exprStack[:], isSupported)
    return val

def evalDependencyLanguage(stack, specmacros):
    """Evaluate an expression stack, returning an English equivalent

       - stack - the stack
       - specmacros - if True, prepare the language for spec inclusion"""

    op, num_args = stack.pop(), 0
    if isinstance(op, tuple):
        op, num_args = op
    if op in '+,':
        # Could parenthesize, not needed yet
        rhs = evalDependencyLanguage(stack, specmacros)
        return evalDependencyLanguage(stack, specmacros) + f' {_opname[op]} ' + rhs
    elif op[0].isalpha():
        # This is an extension or feature name
        if specmacros:
            return nameMarkup(op)
        else:
            return op
    else:
        raise Exception(f'invalid op: {op}')

def dependencyLanguage(dependency, specmacros = False):
    """Return an API dependency expression translated to a form suitable for
       asciidoctor conditionals or header file comments.

       - dependency - the expression
       - specmacros - if False, return a string that can be used as an
         asciidoctor conditional.
         If True, return a string suitable for spec inclusion with macros and
         xrefs included."""

    global exprStack
    exprStack = []
    results = dependencyBNF().parseString(dependency, parseAll=True)
    return evalDependencyLanguage(exprStack, specmacros)

def evalDependencyNames(stack):
    """Evaluate an expression stack, returning the set of extension and
       feature names used in the expression.

       - stack - the stack"""

    op, num_args = stack.pop(), 0
    if isinstance(op, tuple):
        op, num_args = op
    if op in '+,':
        # Do not evaluate the operation. We only care about the names.
        return evalDependencyNames(stack) | evalDependencyNames(stack)
    elif op[0].isalpha():
        return { op }
    else:
        raise Exception(f'invalid op: {op}')

def dependencyNames(dependency):
    """Return a set of the extension and version names in an API dependency
       expression. Used when determining transitive dependencies for spec
       generation with specific extensions included.

       - dependency - the expression"""

    global exprStack
    exprStack = []
    results = dependencyBNF().parseString(dependency, parseAll=True)
    # print(f'names(): stack = {exprStack}')
    return evalDependencyNames(exprStack)

def markupTraverse(expr, level = 0, root = True):
    """Recursively process a dependency in infix form, transforming it into
       asciidoctor markup with expression nesting indicated by indentation
       level.

       - expr - expression to process
       - level - indentation level to render expression at
       - root - True only on initial call"""

    if level > 0:
        prefix = '{nbsp}{nbsp}' * level * 2 + ' '
    else:
        prefix = ''
    str = ''

    for elem in expr:
        if isinstance(elem, pp.ParseResults):
            if not root:
                nextlevel = level + 1
            else:
                # Do not indent the outer expression
                nextlevel = level

            str = str + markupTraverse(elem, level = nextlevel, root = False)
        elif elem in ('+', ','):
            str = str + f'{prefix}{_opname[elem]} +\n'
        else:
            str = str + f'{prefix}{nameMarkup(elem)} +\n'

    return str

def dependencyMarkup(dependency):
    """Return asciidoctor markup for a human-readable equivalent of an API
       dependency expression, suitable for use in extension appendix
       metadata.

       - dependency - the expression"""

    parsed = dependencyExpr.parseString(dependency)
    return markupTraverse(parsed)

if __name__ == "__main__":

    termdict = {
        'VK_VERSION_1_1' : True,
        'f' : False,
        't' : True,
        'false' : False,
        'true' : True,
    }
    termSupported = lambda name: name in termdict and termdict[name]

    for dependency in [
        't',
        #'t+t+f',
        #'t+(t+f),(f,t))',
        #'t+((t+f),(f,t)))',
        'VK_VERSION_1_1+(t,f)',
    ]:
        print(f'expr = {dependency}\n{dependencyMarkup(dependency)}')
        print(f'  language = {dependencyLanguage(dependency)}')
        print(f'  names = {dependencyNames(dependency)}')
        print(f'  value = {evaluateDependency(dependency, termSupported)}')

    def test(dependency, expected):
        val = False
        try:
            val = evaluateDependency(dependency, termSupported)
        except ParseException as pe:
            print(dependency, f'failed parse: {dependency}')
        except Exception as e:
            print(dependency, f'failed eval: {dependency}')

        if val == expected:
            print(f'{dependency} = {val} (as expected)')
        else:
            print(f'{dependency} ERROR: {val} != {expected}')

    test('VK_VERSION_1_1+(false,true)', True)
    test('true', True)
    test('(true)', True)
    test('false,false', False)
    test('false,true', True)
    test('false+true', False)
    test('true+true', True)
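
A short usage sketch of the helpers documented in the module header above (not part of the commit; it assumes the registry directory is on the import path, and the dependency string is a hypothetical example in the documented '+'/',' syntax):

from parse_dependency import (dependencyLanguage, dependencyMarkup,
                              dependencyNames, evaluateDependency)

# Hypothetical 'depends' expression: ',' is OR, '+' is AND
dep = 'VK_KHR_get_physical_device_properties2,VK_VERSION_1_1'

print(dependencyNames(dep))      # names used, e.g. for transitive dependency checks
print(dependencyLanguage(dep))   # 'VK_KHR_get_physical_device_properties2 or VK_VERSION_1_1'
print(dependencyMarkup(dep))     # asciidoctor markup, one term or connector per line

enabled = {'VK_VERSION_1_1'}
print(evaluateDependency(dep, lambda name: name in enabled))   # True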

@@ -1,6 +1,6 @@
 #!/usr/bin/python3 -i
 #
-# Copyright 2013-2022 The Khronos Group Inc.
+# Copyright 2013-2023 The Khronos Group Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 

@@ -658,24 +658,6 @@ class Registry:
             enumInfo = EnumInfo(enum)
             self.addElementInfo(enum, enumInfo, 'enum', self.enumdict)
 
-        # Construct a "validextensionstructs" list for parent structures
-        # based on "structextends" tags in child structures
-        disabled_types = []
-        for disabled_ext in self.reg.findall('extensions/extension[@supported="disabled"]'):
-            for type_elem in disabled_ext.findall("*/type"):
-                disabled_types.append(type_elem.get('name'))
-        for type_elem in self.reg.findall('types/type'):
-            if type_elem.get('name') not in disabled_types:
-                # The structure type this may be chained to.
-                struct_extends = type_elem.get('structextends')
-                if struct_extends is not None:
-                    for parent in struct_extends.split(','):
-                        # self.gen.logMsg('diag', type.get('name'), 'extends', parent)
-                        self.validextensionstructs[parent].append(type_elem.get('name'))
-        # Sort the lists so they do not depend on the XML order
-        for parent in self.validextensionstructs:
-            self.validextensionstructs[parent].sort()
-
         # Parse out all spirv tags in dictionaries
         # Use addElementInfo to catch duplicates
         for spirv in self.reg.findall('spirvextensions/spirvextension'):

@@ -1002,10 +984,8 @@ class Registry:
             # expression of extension names.
             # 'required_key' is used only as a dictionary key at
             # present, and passed through to the script generators, so
-            # they must be prepared to parse that expression.
-            required_key = require.get('feature')
-            if required_key is None:
-                required_key = require.get('extension')
+            # they must be prepared to parse that boolean expression.
+            required_key = require.get('depends')
 
             # Loop over types, enums, and commands in the tag
             for typeElem in require.findall('type'):
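
Since required_key now carries the raw depends expression rather than a single feature or extension name, a small sketch of what generators receive (hypothetical XML fragment, not code from this commit):

import xml.etree.ElementTree as etree

# Hypothetical <require> element with the new boolean 'depends' attribute
require = etree.fromstring(
    '<require depends="VK_KHR_get_physical_device_properties2,VK_VERSION_1_1"/>')

required_key = require.get('depends')
print(required_key)   # the unparsed expression; see registry/parse_dependency.py for evaluation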

@@ -1330,7 +1310,7 @@ class Registry:
         stripped = False
         for api in attribstring.split(','):
             ##print('Checking API {} referenced by {}'.format(api, key))
-            if supportedDictionary[api].required:
+            if api in supportedDictionary and supportedDictionary[api].required:
                 apis.append(api)
             else:
                 stripped = True

@@ -1354,6 +1334,24 @@ class Registry:
                 genProc = self.gen.genFormat
                 genProc(format, name, alias)
 
+    def tagValidExtensionStructs(self):
+        """Construct a "validextensionstructs" list for parent structures
+           based on "structextends" tags in child structures.
+           Only do this for structures tagged as required."""
+
+        for typeinfo in self.typedict.values():
+            type_elem = typeinfo.elem
+            if typeinfo.required and type_elem.get('category') == 'struct':
+                struct_extends = type_elem.get('structextends')
+                if struct_extends is not None:
+                    for parent in struct_extends.split(','):
+                        # self.gen.logMsg('diag', type_elem.get('name'), 'extends', parent)
+                        self.validextensionstructs[parent].append(type_elem.get('name'))
+
+        # Sort the lists so they do not depend on the XML order
+        for parent in self.validextensionstructs:
+            self.validextensionstructs[parent].sort()
+
     def apiGen(self):
         """Generate interface for specified versions using the current
         generator and generator options"""
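
For illustration, a self-contained sketch of the mapping tagValidExtensionStructs builds, using a hypothetical XML fragment and a plain defaultdict in place of the Registry state; each required child structure with a structextends attribute is recorded under every parent it can extend, and the lists are sorted:

from collections import defaultdict
import xml.etree.ElementTree as etree

types = etree.fromstring('''
<types>
  <type category="struct" name="VkPhysicalDeviceVulkan11Features"
        structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo"/>
  <type category="struct" name="VkPhysicalDeviceVulkan12Features"
        structextends="VkPhysicalDeviceFeatures2,VkDeviceCreateInfo"/>
</types>''')

validextensionstructs = defaultdict(list)
for type_elem in types.findall('type'):
    struct_extends = type_elem.get('structextends')
    if struct_extends is not None:
        for parent in struct_extends.split(','):
            validextensionstructs[parent].append(type_elem.get('name'))

for parent in validextensionstructs:
    validextensionstructs[parent].sort()

print(dict(validextensionstructs))
# {'VkPhysicalDeviceFeatures2': ['VkPhysicalDeviceVulkan11Features', 'VkPhysicalDeviceVulkan12Features'],
#  'VkDeviceCreateInfo': ['VkPhysicalDeviceVulkan11Features', 'VkPhysicalDeviceVulkan12Features']}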

@@ -1525,6 +1523,9 @@ class Registry:
         self.stripUnsupportedAPIs(self.cmddict, 'successcodes', self.enumdict)
         self.stripUnsupportedAPIs(self.cmddict, 'errorcodes', self.enumdict)
 
+        # Construct lists of valid extension structures
+        self.tagValidExtensionStructs()
+
         # @@May need to strip <spirvcapability> / <spirvextension> <enable>
         # tags of these forms:
         #   <enable version="VK_API_VERSION_1_0"/>

@@ -1,6 +1,6 @@
 #!/usr/bin/python3 -i
 #
-# Copyright 2013-2022 The Khronos Group Inc.
+# Copyright 2013-2023 The Khronos Group Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 

@@ -34,6 +34,11 @@ TYPES_KNOWN_ALWAYS_VALID = set(('char',
 # Split an extension name into vendor ID and name portions
 EXT_NAME_DECOMPOSE_RE = re.compile(r'[A-Z]+_(?P<vendor>[A-Z]+)_(?P<name>[\w_]+)')
 
+# Match an API version name.
+# This could be refined further for specific APIs.
+API_VERSION_NAME_RE = re.compile(r'[A-Z]+_VERSION_[0-9]')
+
+
 class ProseListFormats(Enum):
     """A connective, possibly with a quantifier."""
     AND = 0

@@ -443,3 +448,7 @@ class ConventionsBase(abc.ABC):
         reference pages."""
         return ''
 
+    def is_api_version_name(self, name):
+        """Return True if name is an API version name."""
+
+        return API_VERSION_NAME_RE.match(name) is not None
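
A quick illustration of what the new helper accepts; API_VERSION_NAME_RE only anchors on the <API>_VERSION_<digit> prefix, so core version names match and extension names do not:

import re

API_VERSION_NAME_RE = re.compile(r'[A-Z]+_VERSION_[0-9]')

print(API_VERSION_NAME_RE.match('VK_VERSION_1_3') is not None)     # True
print(API_VERSION_NAME_RE.match('VK_KHR_swapchain') is not None)   # False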

@@ -1,6 +1,6 @@
 """Utility functions not closely tied to other spec_tools types."""
 # Copyright (c) 2018-2019 Collabora, Ltd.
-# Copyright 2013-2022 The Khronos Group Inc.
+# Copyright 2013-2023 The Khronos Group Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 
File diff suppressed because one or more lines are too long

@@ -1,7 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <registry>
     <comment>
-Copyright 2021-2022 The Khronos Group Inc.
+Copyright 2021-2023 The Khronos Group Inc.
 SPDX-License-Identifier: Apache-2.0 OR MIT
     </comment>
 

registry/vk.xml (1648 changed lines; diff suppressed because it is too large)

@@ -1,6 +1,6 @@
 #!/usr/bin/python3 -i
 #
-# Copyright 2013-2022 The Khronos Group Inc.
+# Copyright 2013-2023 The Khronos Group Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 

@@ -109,6 +109,8 @@ class VulkanConventions(ConventionsBase):
             [ r'_H_(26[45])_', r'_H\1_' ],
             [ r'_VULKAN_([0-9])([0-9])_', r'_VULKAN_\1_\2_' ],
             [ r'_DIRECT_FB_', r'_DIRECTFB_' ],
+            [ r'_VULKAN_SC_10', r'_VULKAN_SC_1_0' ],
+
         ]
 
         for subpat in subpats:
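
A minimal illustration of how such a [pattern, replacement] table is typically applied; the loop body is not shown in this hunk, so the re.sub call and the input name below are assumptions:

import re

subpats = [
    [ r'_H_(26[45])_', r'_H\1_' ],
    [ r'_VULKAN_([0-9])([0-9])_', r'_VULKAN_\1_\2_' ],
    [ r'_DIRECT_FB_', r'_DIRECTFB_' ],
    [ r'_VULKAN_SC_10', r'_VULKAN_SC_1_0' ],
]

name = '_VULKAN_SC_10_EXTENSIONS_'   # hypothetical anchor fragment
for subpat in subpats:
    name = re.sub(subpat[0], subpat[1], name)
print(name)   # _VULKAN_SC_1_0_EXTENSIONS_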