Compare commits


2 Commits

SHA1 Message Date
000783c637 fixing the type for new document 2022-12-13 07:09:10 +00:00
5d7224b046 added new documentation entry 2022-12-12 18:51:27 +00:00
24 changed files with 361 additions and 454 deletions


@ -1,3 +1,3 @@
[DEFAULT]
test_path=./otc_metadata/tests/
test_path=./otc-metadata/tests
top_dir=./


@ -28,7 +28,6 @@ def read_data(filename):
with open(filepath, 'r') as fd:
return yaml.safe_load(fd)
def rewrite_data(filename, data):
"""Rewrites data formatting it

File diff suppressed because it is too large


@ -15,7 +15,7 @@ import copy
import otc_metadata.data
__all__ = ['Service']
__all__ = ['Docs']
BUILTIN_DATA = otc_metadata.data.read_data('docs.yaml')


@ -40,8 +40,7 @@ class Services(object):
# sort docs list by <service_type>_<title>
self._service_data["documents"] = sorted(
sorted_docs,
key=lambda x: f"{x.get('service_type')}{x.get('title')}"
sorted_docs, key=lambda x: f"{x.get('service_type')}{x.get('title')}"
)
# sort services by <service_type>_<service_title>
self._service_data["services"] = sorted(
@ -184,10 +183,7 @@ class Services(object):
]
if "repositories" in srv and environment:
for repo in srv["repositories"]:
if (
"environment" in repo
and repo["environment"] == environment
):
if "environment" in repo and repo["environment"] == environment:
srv_res["repository"] = repo["repo"]
for doc in self.all_docs:
if (
@ -209,22 +205,3 @@ class Services(object):
doc_struct[srv["service_category"]].append(srv_res)
return dict(categories=doc_struct)
def get_service_with_docs_by_service_type(self, service_type):
"""Retrieve service and service docs by service_type
:param str service_type: Filter by service_type
"""
res = dict()
res['service'] = {}
docs = []
services = self._service_data
for doc in services['documents']:
if doc['service_type'] == service_type:
docs.append(doc)
res['documents'] = docs
for service in services['services']:
if service['service_type'] == service_type:
res['service'] = service
break
return res
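
As an aside, here is a minimal usage sketch for the Services helper shown in this hunk; the "ecs" service_type and the printed fields are assumed example values, not part of the diff:

# Hypothetical usage of otc_metadata.services.Services; "ecs" is an assumed service_type.
import otc_metadata.services

data = otc_metadata.services.Services()
res = data.get_service_with_docs_by_service_type("ecs")
# res is a dict holding the matching service and all of its documents.
print(res["service"].get("service_title"))
for doc in res["documents"]:
    print(doc.get("title"), doc.get("link"))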


@ -104,9 +104,6 @@ html_title = "{{ title }}"
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Do not include sources into the rendered results
html_copy_source = False
# -- Options for PDF output --------------------------------------------------
latex_documents = [
{%- if pdf_name is defined %}


@ -21,28 +21,18 @@ deps =
-r{toxinidir}/requirements.txt
commands = {posargs}
# This env is invoked in the periodic pipeline and is therefore responsible to
# build all relevant docs at once.
[testenv:docs]
deps = -r{toxinidir}/doc/requirements.txt
allowlist_externals =
mkdir
cp
sh
commands =
{%- for doc in docs %}
{[testenv:{{ doc.type }}]commands}
{[testenv:json-{{ doc.type }}]commands}
{%- endfor %}
[testenv:pdf-docs]
[testenv:docs-pdf]
deps = -r{toxinidir}/doc/requirements.txt
allowlist_externals =
rm
whitelist_externals =
mkdir
make
bash
cp
commands =
mkdir -p doc/build/pdf
{%- for doc in docs %}
@ -56,7 +46,7 @@ commands =
# HTML version
[testenv:{{ doc.type }}]
deps = -r{toxinidir}/doc/requirements.txt
allowlist_externals =
whitelist_externals =
cp
mkdir
commands =
@ -75,15 +65,14 @@ commands =
# Json version (for search)
[testenv:json-{{ doc.type }}]
deps = -r{toxinidir}/doc/requirements.txt
allowlist_externals =
whitelist_externals =
cp
mkdir
sh
find
commands =
sphinx-build -W --keep-going -b json {{ loc }}/source doc/build/json/{{ doc.type }}
# Drop data useless for the search - wrap it also with sh/xargs due to bugs
# in tox
sh -c "find doc/build/json -type d -and '(' -name '_images' -or -name '_static' -or -name '_sources' ')' -print0 | xargs -0 rm -rf"
# Drop data useless for the search
find doc/build/json -type d -and ( -name '_images' -or -name '_static' -or -name '_sources' ) -exec rm {:} ;
{%- if doc.type == 'api-ref' %}
mkdir -p api-ref/build/json
cp -av doc/build/json/api-ref api-ref/build/json
@ -99,7 +88,7 @@ commands =
# PDF version
[testenv:{{ doc.type }}-pdf-docs]
deps = -r{toxinidir}/doc/requirements.txt
allowlist_externals =
whitelist_externals =
rm
mkdir
make
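
For context, the [testenv:docs] section of this template aggregates the per-document envs with a Jinja2 loop. Below is a minimal rendering sketch of that fragment, assuming a "docs" list of dicts with a "type" key; the two doc types are made-up example values:

from jinja2 import Template

# Stand-in for the tox.ini.j2 fragment above; doc types are hypothetical examples.
fragment = (
    "[testenv:docs]\n"
    "commands =\n"
    "{%- for doc in docs %}\n"
    "    {[testenv:{{ doc.type }}]commands}\n"
    "    {[testenv:json-{{ doc.type }}]commands}\n"
    "{%- endfor %}\n"
)
print(Template(fragment).render(docs=[{"type": "umn"}, {"type": "api-ref"}]))
# Renders one "{[testenv:<type>]commands}" pair per document (umn, api-ref).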


@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
# Copyright 2010-2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import TestCase
class TestCase(TestCase):
"""Test case base class for all unit tests."""


@ -19,10 +19,10 @@ test_otc-metadata
Tests for `otc-metadata` module.
"""
from unittest import TestCase
from otc-metadata.tests import base
class TestOtcMetadata(TestCase):
class TestOtc-metadata(base.TestCase):
def test_something(self):
pass


@ -1,11 +1,11 @@
[metadata]
name = otc-metadata
summary = Metadata about OTC for Ecosystem
description_file =
description-file =
README.rst
author = Open Telekom Cloud
home_page = https://open.telekom.cloud/
python_requires = >=3.6
home-page = https://open.telekom.cloud/
python-requires = >=3.6
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology


@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
setuptools.setup(


@ -2,6 +2,9 @@
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
stestr>=2.0.0 # Apache-2.0
testtools>=2.2.0 # MIT
flake8
hacking>=3.0,<3.1 # Apache-2.0
coverage>=4.0,!=4.4 # Apache-2.0
python-subunit>=0.0.18 # Apache-2.0/BSD
stestr>=1.0.0 # Apache-2.0
testtools>=1.4.0 # MIT


@ -1,6 +1,2 @@
GitPython
ruamel.yaml
requests
jinja2
dirsync
cookiecutter
ruamel

File diff suppressed because it is too large


@ -1,3 +1,4 @@
import copy
import re
import otc_metadata.services
@ -6,17 +7,21 @@ from ruamel.yaml import YAML
data = otc_metadata.services.Services()
new_data = data._service_data
# services = data.service_dict
#services = data.service_dict
for doc in new_data["documents"]:
hc_location = None
link = doc.get("link")
if link:
print(f"Parsing {link}")
# (p1, p2) = link.split("/")
doc["link"] = re.sub(r"/(.*)/(.*)/", r"/\2/\1/", link)
#(p1, p2) = link.split("/")
doc["link"] = re.sub(
r"/(.*)/(.*)/",
r"/\2/\1/",
link
)
_yaml = YAML()
_yaml.indent(mapping=2, sequence=4, offset=2)
with open("new.yaml", "w") as fd:
with open('new.yaml', 'w') as fd:
_yaml.dump(new_data, fd)
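
The re.sub call in this script appears to swap the two path segments of each document link (e.g. service/doc-type into doc-type/service). A small standalone sketch; the example link is hypothetical:

import re

# The pattern only matches links containing three "/" characters.
link = "/elastic-cloud-server/umn/"
print(re.sub(r"/(.*)/(.*)/", r"/\2/\1/", link))  # -> /umn/elastic-cloud-server/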

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -13,9 +13,7 @@ def main():
_yaml = YAML()
_yaml.indent(mapping=2, sequence=4, offset=2)
sys.stdout.write(
"# Auto-generated by otc_metadata.generate_docexports.data\n"
)
sys.stdout.write("# Auto-generated by otc_metadata.generate_docexports.data\n")
_yaml.dump(data.docs_html_by_category("internal"), sys.stdout)

File diff suppressed because it is too large


@ -1,7 +1,7 @@
# import copy
import copy
import otc_metadata.services
# from ruamel.yaml import YAML
from ruamel.yaml import YAML
data = otc_metadata.services.Services()

File diff suppressed because it is too large


@ -15,15 +15,15 @@
import argparse
import logging
# import os
import os
import pathlib
import requests
import subprocess
# import warnings
import warnings
from git import exc
from git import Repo
# from git import SymbolicReference
from git import SymbolicReference
from ruamel.yaml import CommentedMap
from ruamel.yaml import YAML
@ -72,7 +72,7 @@ def process_repositories(args, service):
workdir.mkdir(exist_ok=True)
copy_to = None
# repo_to = None
repo_to = None
for repo in service["repositories"]:
logging.debug(f"Processing repository {repo}")
@ -130,7 +130,6 @@ def process_repositories(args, service):
zuul_templates = None
zuul_jobs = dict()
zuul_new_jobs = list()
zuul_vars = dict()
zuul_config_updated = False
for item in zuul_config:
if "project" in item.keys():
@ -138,11 +137,10 @@ def process_repositories(args, service):
zuul_templates = project.setdefault("templates", [])
if not zuul_templates:
zuul_templates = []
zuul_vars = project.setdefault("vars", {})
elif "job" in item.keys():
job = item["job"]
zuul_jobs[job["name"]] = job
logging.debug(f"Existing jobs {zuul_jobs}")
print(f"Existing jobs {zuul_jobs}")
if "helpcenter-base-jobs" not in zuul_templates:
zuul_templates.append("helpcenter-base-jobs")
zuul_config_updated = True
@ -150,7 +148,6 @@ def process_repositories(args, service):
job_suffix = (
"-hc-int-jobs" if args.environment == "internal" else "-hc-jobs"
)
sphinx_pdf_files = zuul_vars.setdefault('sphinx_pdf_files', [])
for doc in data.docs_by_service_type(service["service_type"]):
logging.debug(f"Analyzing document {doc}")
if not doc.get("type"):
@ -159,12 +156,6 @@ def process_repositories(args, service):
doc_type = "dev-guide"
else:
doc_type = doc["type"]
# Collect all PDF files into sphinx_pdf_files var
pdf_name = doc.get('pdf_name')
if pdf_name and f"{pdf_name}.pdf" not in sphinx_pdf_files:
sphinx_pdf_files.append(f"{pdf_name}.pdf")
zuul_config_updated = True
template_name = f"{doc_type}{job_suffix}"
if doc_type in ["api-ref", "umn", "dev-guide"]:
if template_name not in zuul_templates:
@ -199,7 +190,6 @@ def process_repositories(args, service):
if "project" in item.keys():
project = item["project"]
project["templates"] = zuul_templates
project["vars"] = zuul_vars
# Ensure new jobs are in check
if len(zuul_new_jobs) > 0:
project.setdefault(
@ -218,7 +208,8 @@ def process_repositories(args, service):
project["check"]["jobs"].extend(
[x["job"]["name"] for x in zuul_new_jobs])
# yaml.indent(offset=2, sequence=2)
#yaml.indent(offset=2, sequence=2)
with open(zuul_file_name, "w") as f:
yaml.dump(zuul_config, f)
git_repo.index.add([zuul_file_name.name])
@ -232,8 +223,7 @@ def process_repositories(args, service):
git_repo.index.commit(
(
"Update zuul.yaml file\n\n"
"Performed-by: gitea/infra/otc-metadata"
"/tools/update_zuul_project_config.py"
"Performed-by: gitea/infra/otc-metadata/tools/update_zuul_project_config.py"
)
)
push_args = ["--set-upstream", "origin", branch_name]
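
The sphinx_pdf_files logic touched by these hunks relies on dict.setdefault returning the same mutable object that is stored under the project "vars", so in-place appends end up in the config written back to zuul.yaml. A standalone sketch of that pattern with made-up values:

project = {}
zuul_vars = project.setdefault("vars", {})
sphinx_pdf_files = zuul_vars.setdefault("sphinx_pdf_files", [])
for doc in ({"pdf_name": "ecs-umn"}, {"pdf_name": "ecs-api-ref"}, {"pdf_name": "ecs-umn"}):
    pdf_name = doc.get("pdf_name")
    if pdf_name and f"{pdf_name}.pdf" not in sphinx_pdf_files:
        sphinx_pdf_files.append(f"{pdf_name}.pdf")
print(project)  # {'vars': {'sphinx_pdf_files': ['ecs-umn.pdf', 'ecs-api-ref.pdf']}}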

tox.ini

@ -1,26 +1,56 @@
[tox]
minversion = 3.2.0
envlist = py3,pep8
envlist = py39,pep8
skipsdist = True
ignore_basepython_conflict = true
[testenv]
basepython = python3
usedevelop = True
setenv =
PYTHONWARNINGS=default::DeprecationWarning
OS_STDOUT_CAPTURE=1
OS_STDERR_CAPTURE=1
OS_TEST_TIMEOUT=60
deps = -r{toxinidir}/test-requirements.txt
commands = stestr run {posargs}
[testenv:lower-constraints]
deps = -r{toxinidir}/test-requirements.txt
[testenv:pep8]
commands = flake8 {posargs}
[testenv:venv]
commands = {posargs}
[testenv:cover]
setenv =
VIRTUAL_ENV={envdir}
PYTHON=coverage run --source otc_metadata --parallel-mode
commands =
stestr run {posargs}
coverage combine
coverage html -d cover
coverage xml -o cover/coverage.xml
[testenv:docs]
deps = -r{toxinidir}/doc/requirements.txt
commands = sphinx-build -W -b html doc/source doc/build/html
[testenv:releasenotes]
deps = {[testenv:docs]deps}
commands =
sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
[testenv:debug]
commands = oslo_debug_helper {posargs}
[flake8]
# E123, E125 skipped as they are invalid PEP-8.
show-source = True
ignore = E123,E125,W503
ignore = E123,E125
builtins = _
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build


@ -1,11 +1,19 @@
- project:
merge-mode: squash-merge
default-branch: main
default-branch: master
templates:
- publish-to-pypi
- publish-otc-docs-hc-pti
- release-notes-jobs
check:
jobs:
- otc-tox-pep8
- otc-tox-py38
- otc-tox-py39
- tox-functional
gate:
jobs:
- otc-tox-pep8
- otc-tox-py39
- otc-tox-py38
- otc-tox-py39
- tox-functional