diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 40f1face..6d080f04 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ fail_fast: false default_language_version: python: python3 -exclude: '/ext/|doc/spec/|api/tests/' +exclude: '/ext/' repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.5.0 diff --git a/api/tests/abi/abi_test_gen.py b/api/tests/abi/abi_test_gen.py index 6e32e40f..621052d5 100644 --- a/api/tests/abi/abi_test_gen.py +++ b/api/tests/abi/abi_test_gen.py @@ -3,72 +3,84 @@ # # # # SPDX-License-Identifier: MIT # ############################################################################*/ - +""" +Generate ABI tests. +""" +# pylint: disable=invalid-name # To run this script: # 1. Install clang # 2. Install clang python binding (pip3 install clang) -# 3. export LD_LIBRARY_PATH= (Macos --> LD_LIBRARY_PATH=/Library/Developer/CommandLineTools/usr/lib) +# 3. export LD_LIBRARY_PATH= import argparse import os import glob -import re import sys import clang.cindex -ignore_list = ['mfxEncryptedData', 'mfxSyncPoint', 'mfxSession', 'mfxLoader', 'mfxConfig'] +ignore_list = [ + 'mfxEncryptedData', 'mfxSyncPoint', 'mfxSession', 'mfxLoader', 'mfxConfig' +] structures = {} + def find_structs(node, mfxStruct): + """Find structures in the headers""" if node.kind == clang.cindex.CursorKind.TYPEDEF_DECL: sp = node.underlying_typedef_type.spelling - if sp.startswith("struct"): # this is trick. Don't know right way to assotiate typedef with referenced node + if sp.startswith( + "struct" + ): # this is trick. Don't know right way to associate typedef with referenced node structures[node.displayname] = node.displayname if mfxStruct in sp: # print(type(node)) # print(node.underlying_typedef_type.kind) fields = [] - for cc in node.walk_preorder() : + for cc in node.walk_preorder(): if cc.kind == clang.cindex.CursorKind.FIELD_DECL: - if("reserved" not in cc.spelling): + if "reserved" not in cc.spelling: fields.append(cc.spelling) - with open("gen__.c", 'w') as gf: + with open("gen__.c", 'w', encoding="utf-8") as gf: original_stdout = sys.stdout sys.stdout = gf print("#include ") print("#include ") print("#include ") print("int main() {") - print("printf(\"MSDK_STATIC_ASSERT_STRUCT_SIZE(" + mfxStruct + ", %lu)\\n\", sizeof(" + mfxStruct + "));") + print("printf(\"MSDK_STATIC_ASSERT_STRUCT_SIZE(" + mfxStruct + + ", %lu)\\n\", sizeof(" + mfxStruct + "));") for f in fields: # MSDK_STATIC_ASSERT_STRUCT_OFFSET(mfxInitializationParam, AccelerationMode ,0) - print("printf(\" MSDK_STATIC_ASSERT_STRUCT_OFFSET(" + mfxStruct + ", " + f + ", %lu)\\n\", offsetof(" + mfxStruct + ", " + f + "));") + print( + "printf(\" MSDK_STATIC_ASSERT_STRUCT_OFFSET(" + + mfxStruct + ", " + f + ", %lu)\\n\", offsetof(" + + mfxStruct + ", " + f + "));") print("return 0;}") sys.stdout = original_stdout - + print("Done") - exit(0) + sys.exit(0) # Recurse for children of this node for c in node.get_children(): find_structs(c, mfxStruct) -parser = argparse.ArgumentParser(description='Searches for the structs with missed ABI backward compatibility tests.') +parser = argparse.ArgumentParser( + description= + 'Searches for the structs with missed ABI backward compatibility tests.') parser.add_argument('-i', '--ifolder', help='Input folder with header files.') parser.add_argument('-s', '--structure', help='Name of the structure.') args = parser.parse_args() -errCount = 0 -state = 'search' -for filename in glob.glob(os.path.join(args.ifolder,'*.h')): - print ("Parsing of " + 
filename)
+for filename in glob.glob(os.path.join(args.ifolder, '*.h')):
+    print("Parsing of " + filename)
     index = clang.cindex.Index.create()
     tu = index.parse(filename, ["-DONEVPL_EXPERIMENTAL"])
     find_structs(tu.cursor, args.structure)

 print("Structure definition wasn't located: " + args.structure)
-exit(-1)
+sys.exit(-1)
diff --git a/api/tests/abi/check_missing_structs.py b/api/tests/abi/check_missing_structs.py
index f8b8f0e3..796dc95c 100644
--- a/api/tests/abi/check_missing_structs.py
+++ b/api/tests/abi/check_missing_structs.py
@@ -3,12 +3,15 @@
 # #
 # # SPDX-License-Identifier: MIT
 # ############################################################################*/
-
+"""
+Check for structures which are not present in ABI tests.
+"""
+# pylint: disable=invalid-name
 # To run this script:
 # 1. Install clang
 # 2. Install clang python binding (pip3 install clang)
-# 3. export LD_LIBRARY_PATH= (Macos --> LD_LIBRARY_PATH=/Library/Developer/CommandLineTools/usr/lib)
+# 3. export LD_LIBRARY_PATH=

 import argparse
 import os
@@ -17,21 +20,29 @@
 import sys
 import clang.cindex

-ignore_list = ['mfxEncryptedData', 'mfxSyncPoint', 'mfxSession', 'mfxLoader', 'mfxConfig']
+ignore_list = [
+    'mfxEncryptedData', 'mfxSyncPoint', 'mfxSession', 'mfxLoader', 'mfxConfig'
+]

 structures = {}

+
 def find_structs(node):
+    """Find structures in the headers"""
     if node.kind == clang.cindex.CursorKind.TYPEDEF_DECL:
         sp = node.underlying_typedef_type.spelling
-        if sp.startswith("struct"): # this is trick. Don't know right way to assotiate typedef with referenced node
+        if sp.startswith(
+                "struct"
+        ):  # this is trick. Don't know right way to associate typedef with referenced node
             structures[node.displayname] = node.displayname
     # Recurse for children of this node
     for c in node.get_children():
         find_structs(c)

-parser = argparse.ArgumentParser(description='Searches for the structs with missed ABI backward compatibility tests.')
+parser = argparse.ArgumentParser(
+    description=
+    'Searches for the structs with missed ABI backward compatibility tests.')
 parser.add_argument('-i', '--ifolder', help='Input folder with header files.')
 parser.add_argument('-t', '--test_file', help='Input file with ABI tests.')
@@ -39,8 +50,8 @@ def find_structs(node):
 errCount = 0
 state = 'search'
-for filename in glob.glob(os.path.join(args.ifolder,'*.h')):
-    print ("Parsing of " + filename)
+for filename in glob.glob(os.path.join(args.ifolder, '*.h')):
+    print("Parsing of " + filename)
     index = clang.cindex.Index.create()
     tu = index.parse(filename, ["-DONEVPL_EXPERIMENTAL"])
     find_structs(tu.cursor)
@@ -48,12 +59,12 @@ def find_structs(node):
 print(structures)
 print("Found structures: {}".format(len(structures)))

-print ("Parsing of test file: " + args.test_file)
+print("Parsing of test file: " + args.test_file)

-with open(args.test_file, 'r') as file:
+with open(args.test_file, 'r', encoding="utf-8") as file:
     data = file.read()
     for s in structures:
-        if not re.search(s,data):
+        if not re.search(s, data):
             if s not in ignore_list:
                 print("struct {} missed".format(s))
                 errCount = errCount + 1
@@ -63,4 +74,4 @@ def find_structs(node):
 else:
     print("Failed with {} errors.".format(errCount))

-exit(-errCount)
+sys.exit(-errCount)
diff --git a/api/tests/abi/check_pack.py b/api/tests/abi/check_pack.py
index 0724ef39..3a0bc637 100644
--- a/api/tests/abi/check_pack.py
+++ b/api/tests/abi/check_pack.py
@@ -3,64 +3,93 @@
 # #
 # # SPDX-License-Identifier: MIT
 # ############################################################################*/
+"""
+Check for expected structure packing.
+"""
+# pylint: disable=invalid-name

 import argparse
 import os
 import glob
 import re
+import sys

-typedefs_with_ptr_list = ['mfxHDL', 'mfxThreadTask', 'mfxEncryptedData', 'mfxSyncPoint', 'mfxSession', 'mfxLoader', 'mfxConfig']
+typedefs_with_ptr_list = [
+    'mfxHDL', 'mfxThreadTask', 'mfxEncryptedData', 'mfxSyncPoint',
+    'mfxSession', 'mfxLoader', 'mfxConfig'
+]

-def CheckPtr(line):
-    newline = re.sub(r" *", '', line) # trim spaces. can be embedded into regexp below but will meake it more complex for reader
-    result = re.search(r"\S+\*+\S+;",newline) # search for the pointer declaration like "xxx*yyy;"
-    if result:
-        return 1
-    return 0
-def CheckTypedefWithPtr(line):
-    for typedef in typedefs_with_ptr_list:
-        result = re.search(typedef,newline) # search for the field with the type defines through the typedef
-        if result:
-            return 1
-    return 0
+def CheckPtr(p_line):
+    """Check pointer"""
+    p_newline = re.sub(
+        r" *", '', p_line
+    )  # trim spaces. can be embedded into regexp below but will make it more complex for the reader
+    p_result = re.search(
+        r"\S+\*+\S+;",
+        p_newline)  # search for the pointer declaration like "xxx*yyy;"
+    if p_result:
+        return 1
+    return 0
+
+
+def CheckTypedefWithPtr(p_line):
+    """Check typedef"""
+    for typedef in typedefs_with_ptr_list:
+        p_result = re.search(
+            typedef, p_line
+        )  # search for a field whose type is defined via one of the typedefs
+        if p_result:
+            return 1
+    return 0
+
 checkers = [CheckPtr, CheckTypedefWithPtr]

-parser = argparse.ArgumentParser(description='Check that structures with pointers have correct pragme pack.')
+parser = argparse.ArgumentParser(
+    description='Check that structures with pointers have correct pragma pack.'
+)
 parser.add_argument('-i', '--ifolder', help='Input folder with header files.')

 args = parser.parse_args()

 errCount = 0
 state = 'search'
-for filename in glob.glob(os.path.join(args.ifolder,'*.h')):
-    with open(filename, 'r') as f:
-        print ("Parsing of " + filename)
-        Lines = f.readlines()
-        for line in Lines:
-            result = re.search(r"^\s*MFX_PACK_BEGIN_USUAL_STRUCT", line)
-            if result and state == "search":
-                state = 'inmacro'
-                continue
-            result = re.search(r"^\s*MFX_PACK_END", line)
-            if result and state == "inmacro":
-                state = 'search'
-                continue
-
-            if state == 'inmacro':
-                newline = re.sub(r"\/\*.*\*\/", '', line) # remove /* */ comment within the single line
-                newline = re.sub(r"/\*.*", '', newline) # remove begin of the comment when we have multi line comments
-                newline = re.sub(r".*\*\/$", '', newline) # remove end of the comment when we have multi line comments
-                for checker in checkers:
-                    r = checker(newline)
-                    if(r):
-                        print("Line{}: {}".format(errCount, newline.strip())) # wrong pack of the structure is found. structure with pointers is wrongly packed.
- errCount += 1 +for filename in glob.glob(os.path.join(args.ifolder, '*.h')): + with open(filename, 'r', encoding="utf-8") as f: + print("Parsing of " + filename) + Lines = f.readlines() + for line in Lines: + result = re.search(r"^\s*MFX_PACK_BEGIN_USUAL_STRUCT", line) + if result and state == "search": + state = 'inmacro' + continue + result = re.search(r"^\s*MFX_PACK_END", line) + if result and state == "inmacro": + state = 'search' + continue + + if state == 'inmacro': + newline = re.sub( + r"\/\*.*\*\/", '', + line) # remove /* */ comment within the single line + newline = re.sub( + r"/\*.*", '', newline + ) # remove begin of the comment when we have multi line comments + newline = re.sub( + r".*\*\/$", '', newline + ) # remove end of the comment when we have multi line comments + for checker in checkers: + r = checker(newline) + if r: + print( + "Line{}: {}".format(errCount, newline.strip()) + ) # wrong pack is found. structure with pointers is wrongly packed. + errCount += 1 if errCount == 0: print("Passed") else: print("Failed with {} errors.".format(errCount)) -exit(-errCount) +sys.exit(-errCount) diff --git a/api/tests/abi/mfx_static_assert_structs.cpp b/api/tests/abi/mfx_static_assert_structs.cpp index c1fc630d..47dc876d 100644 --- a/api/tests/abi/mfx_static_assert_structs.cpp +++ b/api/tests/abi/mfx_static_assert_structs.cpp @@ -7,8 +7,8 @@ #include #include -#include "mfx.h" -#include "mfxcamera.h" +#include "./mfx.h" +#include "./mfxcamera.h" /* .cpp instead of .h to avoid changing of include files dependencies graph and not to include unnecessary includes into libmfx library */ diff --git a/api/tests/compile_headers/removed_api.h b/api/tests/compile_headers/removed_api.h index 84e08d44..068f1d78 100644 --- a/api/tests/compile_headers/removed_api.h +++ b/api/tests/compile_headers/removed_api.h @@ -10,10 +10,10 @@ * removed API entry - please keep original name of the header file. */ -#ifndef __REMOVED_API_H__ -#define __REMOVED_API_H__ +#ifndef API_TESTS_COMPILE_HEADERS_REMOVED_API_H_ +#define API_TESTS_COMPILE_HEADERS_REMOVED_API_H_ -#include "mfxstructures.h" +#include "./mfxstructures.h" /* Don't expose this file publically without special approval. 
*/ @@ -36,4 +36,4 @@ enum { /* Functions*/ -#endif /* __REMOVED_API_H__ */ +#endif /* API_TESTS_COMPILE_HEADERS_REMOVED_API_H_ */ diff --git a/api/tests/compile_headers/sdk_headers.inc b/api/tests/compile_headers/sdk_headers.inc index 5c91f971..124eeef1 100644 --- a/api/tests/compile_headers/sdk_headers.inc +++ b/api/tests/compile_headers/sdk_headers.inc @@ -1,3 +1,9 @@ +/*############################################################################ + # Copyright (C) 2020 Intel Corporation + # + # SPDX-License-Identifier: MIT + ############################################################################*/ + #pragma message ("Add headers to test begin") #include "mfx.h" diff --git a/api/tests/compile_headers/test.c b/api/tests/compile_headers/test.c index 778fbb43..b514f838 100644 --- a/api/tests/compile_headers/test.c +++ b/api/tests/compile_headers/test.c @@ -10,7 +10,7 @@ #include -#include "removed_api.h" +#include "./removed_api.h" #include "sdk_headers.inc" diff --git a/api/tests/compile_headers/test.cpp b/api/tests/compile_headers/test.cpp index 2b1dd8f2..11301395 100644 --- a/api/tests/compile_headers/test.cpp +++ b/api/tests/compile_headers/test.cpp @@ -27,9 +27,9 @@ typedef struct { static_assert(offsetof(PackStart, a) == 0, "Error - offset of PackStart.a should be 0"); static_assert(offsetof(PackStart, b) == 1, "Error - offset of PackStart.b should be 1"); -#include "removed_api.h" +#include "./removed_api.h" -#include "mfxvideo++.h" +#include "./mfxvideo++.h" #include "sdk_headers.inc" diff --git a/api/tests/doc_examples/dummy_impl.c b/api/tests/doc_examples/dummy_impl.c index 69896e76..8f7ebfbe 100644 --- a/api/tests/doc_examples/dummy_impl.c +++ b/api/tests/doc_examples/dummy_impl.c @@ -4,14 +4,14 @@ # SPDX-License-Identifier: MIT ############################################################################*/ -#include "mfxdefs.h" -#include "mfxcommon.h" -#include "mfxstructures.h" -#include "mfxdispatcher.h" -#include "mfximplcaps.h" -#include "mfxjpeg.h" -#include "mfxvideo.h" -#include "mfxadapter.h" +#include "./mfxdefs.h" +#include "./mfxcommon.h" +#include "./mfxstructures.h" +#include "./mfxdispatcher.h" +#include "./mfximplcaps.h" +#include "./mfxjpeg.h" +#include "./mfxvideo.h" +#include "./mfxadapter.h" #define UNUSED_PARAM(x) (void)(x) @@ -70,7 +70,7 @@ mfxHDL* MFX_CDECL MFXQueryImplsDescription(mfxImplCapsDeliveryFormat format, mfx { UNUSED_PARAM(format); UNUSED_PARAM(num); - return (mfxHDL*)1; + return (mfxHDL*)(1); //NOLINT } mfxStatus MFX_CDECL MFXReleaseImplDescription(mfxHDL hdl) diff --git a/api/tests/docker/Dockerfile.clang b/api/tests/docker/Dockerfile.clang index 7a83e046..751a96a9 100644 --- a/api/tests/docker/Dockerfile.clang +++ b/api/tests/docker/Dockerfile.clang @@ -1,3 +1,9 @@ +# ============================================================================== +# Copyright (C) Intel Corporation +# +# SPDX-License-Identifier: MIT +# ============================================================================== + FROM ubuntu:20.04 ARG CLANG_VER=12 diff --git a/api/tests/docker/Dockerfile.gcc b/api/tests/docker/Dockerfile.gcc index f6c6b5e1..1d039e56 100644 --- a/api/tests/docker/Dockerfile.gcc +++ b/api/tests/docker/Dockerfile.gcc @@ -1,3 +1,9 @@ +# ============================================================================== +# Copyright (C) Intel Corporation +# +# SPDX-License-Identifier: MIT +# ============================================================================== + FROM ubuntu:20.04 RUN apt-get update \ diff --git a/doc/spec/build-spec.py 
b/doc/spec/build-spec.py index 250a9180..d57bf2a2 100644 --- a/doc/spec/build-spec.py +++ b/doc/spec/build-spec.py @@ -4,6 +4,10 @@ # # SPDX-License-Identifier: MIT ############################################################################ +""" +Build specification. +""" +# pylint: disable=invalid-name # Main script to build the specification. Derived from: # github.com/oneapi-src/oneAPI-spec/blob/main/scripts/oneapi.py @@ -12,8 +16,9 @@ import glob import os import os.path +import sys import shutil -import subprocess +import subprocess # nosec B404 from functools import wraps from os.path import join @@ -24,12 +29,14 @@ doxygen_xml = join(doxygen_dir, 'xml', 'index.xml') indent = 0 +cl_args = [] def action(func): + """Execute and log action""" @wraps(func) def wrapped(*args, **kwargs): - global indent + global indent # pylint: disable=W0603 log('%s: %s' % ( args[1] if len(args) > 1 and args[1] else wrapped.__name__, args[0], @@ -45,6 +52,7 @@ def wrapped(*args, **kwargs): class cd: """Context manager for changing the current working directory""" def __init__(self, newPath): + self.savedPath = "" self.newPath = os.path.expanduser(newPath) def __enter__(self): @@ -57,27 +65,20 @@ def __exit__(self, etype, value, traceback): def log(*args, **kwargs): + """Log message""" print(indent * ' ' + ' '.join(map(str, args)), flush=True, **kwargs) def shell(c): + """Execute shell command""" log(c) if cl_args.dry_run: return - subprocess.check_call(c, shell=True) - - -def rm(dir): - log('rm -rf', dir) - if cl_args.dry_run: - return - shutil.rmtree(dir, ignore_errors=True) - if cl_args.dry_run: - return - shutil.copy(src, dst) + subprocess.check_call(c, shell=True) # nosec B602 def copy(src, dst): + """Execute copy command""" log('cp', src, dst) if cl_args.dry_run: return @@ -85,6 +86,7 @@ def copy(src, dst): def makedirs(path): + """Execute mkdir command""" log('mkdir -p', path) if cl_args.dry_run: return @@ -92,6 +94,7 @@ def makedirs(path): def sphinx(root, target): + """Execute sphinx build""" if not cl_args.verbose: os.environ['LATEXMKOPTS'] = '--silent' os.environ['LATEXOPTS'] = '-interaction=nonstopmode -halt-on-error' @@ -117,18 +120,20 @@ def sphinx(root, target): def get_env(var): + """Get environment variable""" return os.environ[var] if var in os.environ else '' def root_only(root): + """Check if in root dir""" if root != '.': - exit('Error: Only works from root') + sys.exit('Error: Only works from root') @action -def dockerbuild(root, target=None): +def dockerbuild(root, _target=None): + """Build image""" cd(root) - os.system("pwd") copy(join(root, 'requirements.txt'), join(root, 'docker')) copy(join(root, 'ubuntu-packages.txt'), join(root, 'docker')) copy(join(root, 'install.sh'), join(root, 'docker')) @@ -143,7 +148,8 @@ def dockerbuild(root, target=None): @action -def dockerrun(root, target=None): +def dockerrun(_root, _target=None): + """Run image""" shell('docker run --rm -it' ' -e http_proxy=%s' ' -e https_proxy=%s' @@ -162,23 +168,27 @@ def dockerrun(root, target=None): @action -def clean(root, target=None): +def clean(root, _target=None): + """Execute sphinx clean""" apply_dirs(root, 'clean') sphinx(root, 'clean') def command(root, target): + """Execute command list""" commands[target](root, target) def apply_dirs(root, target): + """Execute command list""" elements = join(root, 'doc', 'spec') if os.path.exists(elements): - for dir in dirs: - command(join(elements, dir), target) + for curr_dir in dirs: + command(join(elements, curr_dir), target) def up_to_date(target, deps): + 
"""Check if up to date""" if not os.path.exists(target): return False for dep in deps: @@ -188,11 +198,13 @@ def up_to_date(target, deps): def doxygen_files(root): + """Get doxyfile""" return [join(root, 'Doxyfile')] + glob.glob(join('api', 'vpl', '**'), recursive=True) -def doxygen(root, target=None): +def doxygen(root, _target=None): + """Execute doxygen build""" with cd(root): doxyfile = 'Doxyfile' if not os.path.exists(doxyfile) or up_to_date(join(root, doxygen_xml), @@ -202,18 +214,21 @@ def doxygen(root, target=None): @action -def prep(root='.', target=None): +def prep(root='.', _target=None): + """Prepare doxygen build""" apply_dirs(root, 'prep') doxygen(root) @action def build(root, target): + """Build sphinx""" prep(root) sphinx(root, target) def remove_elements(li, elements): + """Remove elements""" for e in elements: if e in li: li.remove(e) @@ -221,10 +236,13 @@ def remove_elements(li, elements): @action -def sort_words(root, target=None): - with open(join('source', 'spelling_wordlist.txt')) as fin: +def sort_words(_root, _target=None): + """Sort words in spell check""" + with open(join('source', 'spelling_wordlist.txt'), + encoding="utf-8") as fin: lines = fin.readlines() - with open(join('source', 'spelling_wordlist.txt'), 'w') as fout: + with open(join('source', 'spelling_wordlist.txt'), 'w', + encoding="utf-8") as fout: for li in sorted(list(set(lines))): fout.write(li) @@ -247,7 +265,8 @@ def sort_words(root, target=None): def main(): - global cl_args + """Main specification build""" + global cl_args # pylint: disable=W0603 parser = argparse.ArgumentParser( description='Build Intel® Video Processing Library spec.') parser.add_argument('action', diff --git a/doc/spec/common_conf.py b/doc/spec/common_conf.py index 6f68bf62..e34e401a 100644 --- a/doc/spec/common_conf.py +++ b/doc/spec/common_conf.py @@ -4,24 +4,17 @@ # # SPDX-License-Identifier: MIT ############################################################################ +""" +Common configuration for building specification. +""" +# pylint: disable=invalid-name # Derived from: # github.com/oneapi-src/oneAPI-spec/blob/main/source/conf/common_conf.py -import json import string -import sys -from os import environ -from os.path import abspath, join - import docutils -from pygments import token -from pygments.lexer import RegexLexer -from sphinx.highlighting import lexers - -import vplapi - # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # @@ -52,8 +45,7 @@ 'vpl_spec_version': "2.10.0", } -prolog_template = string.Template( - r""" +prolog_template = string.Template(r""" .. |vpl_full_name| replace:: Intel\ :supsub:`reg` Video Processing Library .. |vpl_short_name| replace:: Intel\ :supsub:`reg` VPL @@ -61,13 +53,11 @@ .. include:: .. |regsup| replace:: :supsub:`reg` .. |intel_r| replace:: Intel\ :supsub:`reg` -.. |msdk_full_name| replace:: Intel\ :supsub:`reg`""" - + ' Media Software Development Kit' -) +.. 
|msdk_full_name| replace:: Intel\ :supsub:`reg`""" + + ' Media Software Development Kit') rst_prolog = prolog_template.substitute(env) - # for substitutions in code blocks and sphinx-prompts: substitutions = [] @@ -198,7 +188,8 @@ 'diffweightslayer': '\\operatorname{diff\\_weights\\_layer}', 'diffweightsiter': '\\operatorname{diff\\_weights\\_iter}', 'diffweightspeephole': '\\operatorname{diff\\_weights\\_peephole}', - 'diffweightsprojection': '\\operatorname{diff\\_weights\\_projection}', # noqa: E501 + 'diffweightsprojection': + '\\operatorname{diff\\_weights\\_projection}', # noqa: E501 'diffbias': '\\operatorname{diff\\_bias}', 'diffdst': '\\operatorname{diff\\_dst}', 'diffdstlayer': '\\operatorname{diff\\_dst\\_layer}', @@ -213,35 +204,21 @@ } -def supsub_role(name, rawtext, text, lineno, inliner, options={}, content=[]): +def supsub_role(_name, + _rawtext, + text, + _lineno, + _inliner, + _options=None, + _content=None): + """Superscript substitution""" node = docutils.nodes.superscript() node2 = docutils.nodes.substitution_reference(refname=text) node += [node2] return [node], [] -class BCLLexer(RegexLexer): - name = 'regexp' - - tokens = { - 'root': [ - (r'MyKeyword', token.Keyword), - (r'[a-zA-Z]', token.Name), - (r'\s', token.Text), - ] - } - - -# oneart/oidn uses regexp code tag so make a faker lexer to avoid -# warnings -lexers['regexp'] = BCLLexer(startinline=True) - -# -- Watermark - -sphinxmark_enable = 'ONEAPI_DRAFT' in environ -sphinxmark_div = 'row#main-content' - - def setup(app): + """Setup""" app.add_role('supsub', supsub_role) app.add_css_file('custom.css') diff --git a/doc/spec/element_conf.py b/doc/spec/element_conf.py index 08d8082d..21516a9d 100644 --- a/doc/spec/element_conf.py +++ b/doc/spec/element_conf.py @@ -4,6 +4,10 @@ # # SPDX-License-Identifier: MIT ############################################################################ +""" +Configuration file for the Sphinx documentation builder. +""" +# pylint: disable=invalid-name # Derived from: # github.com/oneapi-src/oneAPI-spec/blob/main/source/conf/element_conf.py @@ -25,13 +29,14 @@ # -- Project information ----------------------------------------------------- -copyright = u'2021, Intel' +# Included in page footers. ''project_copyright'' is an allowed alias to +# 'copyright' to avoid conflict with built-in keyword +project_copyright = '2023, Intel' # The short X.Y version -version = u'' +version = '' # The full version, including alpha/beta/rc tags -release = u'' - +release = '' # -- General configuration --------------------------------------------------- @@ -39,7 +44,6 @@ # # needs_sphinx = '1.0' - # Add any paths that contain templates here, relative to this directory. templates_path = ['../../../_templates'] @@ -60,7 +64,6 @@ # The name of the Pygments (syntax highlighting) style to use. pygments_style = None - # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -77,7 +80,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -# +# # html_static_path = ['../../../_static'] # Custom sidebar templates, must be a dictionary that maps document names @@ -93,22 +96,21 @@ # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. 
-htmlhelp_basename = project + '-spec' # noqa: F821 - +htmlhelp_basename = project + '-spec' # pylint: disable=E0602 # -- Options for LaTeX output ------------------------------------------------ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). -pro = project.translate(project.maketrans(' ', '-')) # noqa: F821 +pro = project.translate(project.maketrans(' ', '-')) # pylint: disable=E0602 file_name = f'{pro}-spec.tex' latex_documents = [ ( master_doc, file_name, - project + u' Specification', # noqa: F821 - u'Intel', + project + ' Specification', # pylint: disable=E0602 + 'Intel', 'manual', ), ] @@ -116,7 +118,7 @@ # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. -epub_title = project # noqa: F821 +epub_title = project # pylint: disable=E0602 # The unique identifier of the text. This can be a ISBN number # or the project homepage. @@ -130,7 +132,6 @@ # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] - # -- Extension configuration ------------------------------------------------- # -- Options for todo extension ---------------------------------------------- @@ -140,5 +141,5 @@ # -- Options for breathe extension ------------------------------------------- -breathe_projects = {'DEF_BREATHE_PROJECT': '../doxygen/xml'} # noqa: F821 +breathe_projects = {'DEF_BREATHE_PROJECT': '../doxygen/xml'} # pylint: disable=E0602 breathe_default_project = 'none' diff --git a/doc/spec/source/conf.py b/doc/spec/source/conf.py index 749b8560..0f839dba 100644 --- a/doc/spec/source/conf.py +++ b/doc/spec/source/conf.py @@ -1,6 +1,10 @@ # SPDX-FileCopyrightText: 2019-2020 Intel Corporation # # SPDX-License-Identifier: MIT +""" +Main configuration file for specification build. +""" +# pylint: disable=invalid-name,exec-used # For Intel® Video Processing Library (Intel® VPL) @@ -18,7 +22,6 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # -import os import sys from os.path import join @@ -26,11 +29,11 @@ repo_root = '..' sys.path.append(repo_root) -exec(open(join(repo_root, 'common_conf.py')).read()) -exec(open(join(repo_root, 'element_conf.py')).read()) +exec(open(join(repo_root, 'common_conf.py'), encoding="utf-8").read()) # nosec # pylint: disable=R1732 +exec(open(join(repo_root, 'element_conf.py'), encoding="utf-8").read()) # nosec # pylint: disable=R1732 cpp_id_attributes = ['MFX_CDECL', 'MFX_DEPRECATED'] c_id_attributes = ['MFX_CDECL', 'MFX_DEPRECATED'] -spelling_word_list_filename=['../spelling_wordlist.txt'] +spelling_word_list_filename = ['../spelling_wordlist.txt'] diff --git a/doc/spec/source/snippets/.clang-format b/doc/spec/source/snippets/.clang-format new file mode 100644 index 00000000..ef2ae21f --- /dev/null +++ b/doc/spec/source/snippets/.clang-format @@ -0,0 +1,4 @@ +--- +DisableFormat: true +SortIncludes: false +... diff --git a/doc/spec/source/snippets/CPPLINT.cfg b/doc/spec/source/snippets/CPPLINT.cfg new file mode 100644 index 00000000..d6cb8cd2 --- /dev/null +++ b/doc/spec/source/snippets/CPPLINT.cfg @@ -0,0 +1 @@ +exclude_files=. 
\ No newline at end of file diff --git a/doc/spec/source/snippets/prg_config.cpp b/doc/spec/source/snippets/prg_config.cpp index 6fa78d0b..b95365cf 100644 --- a/doc/spec/source/snippets/prg_config.cpp +++ b/doc/spec/source/snippets/prg_config.cpp @@ -1,3 +1,9 @@ +/*############################################################################ + # Copyright (C) 2020 Intel Corporation + # + # SPDX-License-Identifier: MIT + ############################################################################*/ + // Example using Intel® Video Processing Library (Intel® VPL) #include diff --git a/doc/spec/source/snippets/prg_disp.c b/doc/spec/source/snippets/prg_disp.c index 6424c7ca..5f536aa6 100644 --- a/doc/spec/source/snippets/prg_disp.c +++ b/doc/spec/source/snippets/prg_disp.c @@ -1,4 +1,3 @@ - /*############################################################################ # Copyright (C) 2021 Intel Corporation # diff --git a/doc/spec/source/snippets/prg_encoding.c b/doc/spec/source/snippets/prg_encoding.c index b1de95b9..fd599c94 100644 --- a/doc/spec/source/snippets/prg_encoding.c +++ b/doc/spec/source/snippets/prg_encoding.c @@ -1,4 +1,3 @@ - /*############################################################################ # Copyright (C) 2020 Intel Corporation # diff --git a/doc/spec/source/snippets/prg_mem.c b/doc/spec/source/snippets/prg_mem.c index d30be067..c920aaac 100644 --- a/doc/spec/source/snippets/prg_mem.c +++ b/doc/spec/source/snippets/prg_mem.c @@ -1,3 +1,9 @@ +/*############################################################################ + # Copyright (C) 2020 Intel Corporation + # + # SPDX-License-Identifier: MIT + ############################################################################*/ + #include #include "mfxdefs.h" diff --git a/doc/spec/source/snippets/prg_surface_sharing.cpp b/doc/spec/source/snippets/prg_surface_sharing.cpp index 972fa0f1..92eee090 100644 --- a/doc/spec/source/snippets/prg_surface_sharing.cpp +++ b/doc/spec/source/snippets/prg_surface_sharing.cpp @@ -1,3 +1,9 @@ +/*############################################################################ + # Copyright (C) 2020 Intel Corporation + # + # SPDX-License-Identifier: MIT + ############################################################################*/ + // Example using Intel® Video Processing Library (Intel® VPL) #include diff --git a/doc/spec/source/snippets/prg_transcoding.c b/doc/spec/source/snippets/prg_transcoding.c index 33fcf490..859bccd7 100644 --- a/doc/spec/source/snippets/prg_transcoding.c +++ b/doc/spec/source/snippets/prg_transcoding.c @@ -1,3 +1,9 @@ +/*############################################################################ + # Copyright (C) 2020 Intel Corporation + # + # SPDX-License-Identifier: MIT + ############################################################################*/ + #include #include "mfxdefs.h" diff --git a/doc/spec/vplapi.py b/doc/spec/vplapi.py deleted file mode 100644 index ef47c99f..00000000 --- a/doc/spec/vplapi.py +++ /dev/null @@ -1,76 +0,0 @@ -# SPDX-FileCopyrightText: 2020 Intel Corporation -# -# SPDX-License-Identifier: MIT - -__version__ = '0.0.1' - -import re - -from docutils import nodes -from docutils.parsers.rst import Directive - -from sphinx.locale import _ -from sphinx.util.docutils import SphinxDirective - -from sphinx.util import logging - -logger = logging.getLogger(__name__) - -def type_of_node(n): - return type(n).__name__ - -# --> def_node -# --> def_node[0] -# Since --> def_node[0][0].astext() -# -# --> def_node[1] -# her 1.0 her. 
--> def_node[1][0].astext() -# -# - -def check_definition_list_item(def_node): - if type_of_node(def_node[0]) == 'term' and type_of_node(def_node[1]) == 'definition': - if(def_node[0][0].astext() == 'Since'): - potential_ver_str = def_node[1][0].astext() - potential_m = re.search(r'since API version (\d+\.\d+)\.',potential_ver_str) - if potential_m: - return potential_m.group(1) - - return None - -def process_function_nodes(app, doctree, fromdocname): - if fromdocname.find("VPL_") >= 0: - for section in doctree.traverse(nodes.section, descend=True): - for n in section: - type_of_section = type_of_node(n) - if type_of_section == 'title': - title_text = n[0] - if(title_text.startswith("MFX")): # this is function description section - # lets search for the `Since` term - isFunction = False - hasVersion = False - version = None - functionName = title_text - for k in n.traverse(condition=None, include_self=True, descend=True, siblings=True): - node_type = type_of_node(k) - if node_type == 'desc': - if k.hasattr('desctype'): - if k.get('desctype') == 'function': - isFunction = True - else: - continue - if node_type == 'definition_list_item': - tmp = check_definition_list_item(k) - if tmp: - version = tmp - hasVersion = True - if isFunction and not hasVersion: - logger.warning("Function {} doesn't have min API version defined".format(functionName)) - -def setup(app): - app.connect('doctree-resolved', process_function_nodes) - return { - 'version': __version__, - 'parallel_read_safe': True, - 'parallel_write_safe': True - }
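Reviewer note, not part of the patch: a minimal, self-contained sketch of the rule that check_pack.py enforces inside MFX_PACK_BEGIN_USUAL_STRUCT blocks, built from the same pointer regexp and typedefs_with_ptr_list used above. The helper name is_suspicious and the sample field declarations are illustrative assumptions, not code from this change.

# Illustrative only; mirrors CheckPtr and CheckTypedefWithPtr from check_pack.py.
import re

typedefs_with_ptr_list = [
    'mfxHDL', 'mfxThreadTask', 'mfxEncryptedData', 'mfxSyncPoint',
    'mfxSession', 'mfxLoader', 'mfxConfig'
]


def is_suspicious(field_decl):
    """Return True if a field declaration is flagged in a USUAL-packed block."""
    compact = re.sub(r" *", '', field_decl)  # drop spaces, as CheckPtr does
    if re.search(r"\S+\*+\S+;", compact):    # raw pointer, e.g. "mfxU8*Data;"
        return True
    # handle typedefs that wrap pointers, as CheckTypedefWithPtr does
    return any(re.search(t, compact) for t in typedefs_with_ptr_list)


assert is_suspicious("    mfxU8 *Data;")        # pointer field gets flagged
assert is_suspicious("    mfxHDL Handle;")      # handle typedef gets flagged
assert not is_suspicious("    mfxU32 FourCC;")  # plain scalar is fine

When a flagged declaration appears between MFX_PACK_BEGIN_USUAL_STRUCT and MFX_PACK_END, the script reports it and exits with a non-zero status.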