diff --git a/.github/actions/generate_types/generate.py b/.github/actions/generate_types/generate.py
new file mode 100644
index 000000000..d468ddeda
--- /dev/null
+++ b/.github/actions/generate_types/generate.py
@@ -0,0 +1,255 @@
+#
+# Copyright (c) 2021 Red Hat, Inc.
+# This program and the accompanying materials are made
+# available under the terms of the Eclipse Public License 2.0
+# which is available at https://www.eclipse.org/legal/epl-2.0/
+#
+# SPDX-License-Identifier: EPL-2.0
+#
+# Contributors:
+#   Red Hat, Inc. - initial API and implementation
+#
+
+import os
+import json
+import yaml
+import requests
+from collections import OrderedDict
+
+def write_json(filename: str, object: dict) -> None:
+    """
+    Write the json object to the specified filename
+    """
+    with open(filename, 'w') as out:
+        json.dump(object, out, sort_keys=False, indent=2, separators=(',', ': '), ensure_ascii=True)
+
+def create_ref(path):
+    """
+    Create a json definition reference to a specific path
+    """
+    return '#/definitions/' + path
+
+def consolidate_crds() -> object:
+    """
+    Consolidate all crds in the crds/ directory into one json object
+    """
+    crds_dir = os.path.join('crds')
+    crds = os.listdir(crds_dir)
+    consolidated_crds_json = {
+        'definitions': {},
+    }
+    for file in crds:
+        crd_file_path = os.path.join(crds_dir, file)
+        with open(crd_file_path) as file:
+            yamlData = yaml.load(file, Loader=yaml.FullLoader)
+            crd_name = yamlData['spec']['names']['kind']
+
+            # Add all the available schema versions, keyed as <version>.<Kind>
+            for version in yamlData['spec']['versions']:
+                new_json_name = version['name'] + '.' + crd_name
+                new_schema = version['schema']['openAPIV3Schema']
+                consolidated_crds_json['definitions'][new_json_name] = new_schema
+
+    return consolidated_crds_json
+
+def add_property_definition(root_definitions_object: dict, current_path: str, curr_object: dict, queue: list) -> None:
+    """
+    Given an object, convert the child properties into references with new definitions at the root of root_definitions_object.
+    Also removes oneOf references since they aren't supported by openapi-generator.
+
+    Converts:
+    {
+      "properties": {
+        "foo": {
+          "type": "object",
+          "properties": {
+            "bar": {
+              "type": "string"
+            }
+          }
+        }
+      }
+    }
+
+    into:
+    {
+      "definitions": {
+        "foo": {
+          "type": "object",
+          "properties": {
+            "bar": {
+              "$ref": "#/definitions/bar"
+            }
+          }
+        },
+        "bar": {
+          "type": "string"
+        }
+      }
+    }
+    """
+    for prop in curr_object['properties']:
+        new_path = current_path + '.' + prop
+        new_object = curr_object['properties'][prop]
+
+        # openapi-generator doesn't accept oneOf so we have to remove them
+        if 'oneOf' in new_object:
+            del new_object['oneOf']
+
+        root_definitions_object[new_path] = new_object
+
+        # openapi-generator doesn't accept oneOf so we have to remove them
+        if 'items' in new_object:
+            if 'oneOf' in new_object['items']:
+                del new_object['items']['oneOf']
+            new_path += ".items"
+
+        queue.append({
+            new_path: new_object
+        })
+        curr_object['properties'][prop] = {
+            '$ref': create_ref(new_path)
+        }
+
+def add_item_definition(root_definitions_object: dict, current_path: str, curr_object: dict, queue: list) -> None:
+    """
+    Given an array object, convert the properties of its items into references with new definitions at the root of root_definitions_object.
+    Also removes oneOf references since they aren't supported by openapi-generator.
+
+    Converts:
+    {
+      "v1devworkspace": {
+        "properties": {
+          "spec": {
+            "items": {
+              "type": "object",
+              "properties": {
+                "foo": {
+                  "type": "string",
+                  "description": "Type of funding or platform through which funding is possible."
+                }
+              }
+            },
+            "type": "array"
+          }
+        }
+      }
+    }
+
+    into:
+    {
+      "definitions": {
+        "v1devworkspace": {
+          "properties": {
+            "spec": {
+              "$ref": "#/definitions/v1devworkspace.spec.items"
+            }
+          }
+        },
+        "v1devworkspace.spec.items": {
+          "items": {
+            "$ref": "#/definitions/v1devworkspace.spec"
+          },
+          "type": "array"
+        },
+        "v1devworkspace.spec": {
+          "type": "object",
+          "properties": {
+            "foo": {
+              "$ref": "#/definitions/v1devworkspace.spec.items.foo"
+            }
+          }
+        },
+        "v1devworkspace.spec.items.foo": {
+          "type": "string",
+          "description": "Type of funding or platform through which funding is possible."
+        }
+      }
+    }
+    """
+    if 'properties' in curr_object['items']:
+        root_definitions_object[current_path] = curr_object
+
+        path = current_path
+        pathList = current_path.split('.')
+        if pathList[-1] == 'items':
+            pathList = pathList[:-1]
+            path = '.'.join(pathList)
+
+        for prop in curr_object['items']['properties']:
+            new_path = current_path + '.' + prop
+            new_object = curr_object['items']['properties'][prop]
+
+            # openapi-generator doesn't accept oneOf so we have to remove them
+            if 'oneOf' in new_object:
+                del new_object['oneOf']
+            root_definitions_object[new_path] = new_object
+            queue.append({
+                new_path: new_object
+            })
+            curr_object['items']['properties'][prop] = {
+                '$ref': create_ref(new_path)
+            }
+        root_definitions_object[path] = curr_object['items']
+        curr_object['items'] = {
+            '$ref': create_ref(path)
+        }
+    else:
+        root_definitions_object[current_path] = curr_object
+
+def add_definition(root_definitions_object: dict, current_path: str, curr_object: dict, queue: list) -> None:
+    """
+    Create a property or item definition depending on whether 'properties' or 'items' is present in curr_object
+    """
+    if 'properties' in curr_object:
+        add_property_definition(root_definitions_object, current_path, curr_object, queue)
+    elif 'items' in curr_object:
+        add_item_definition(root_definitions_object, current_path, curr_object, queue)
+
+def flatten(consolidated_crds_object: dict) -> None:
+    """
+    Flatten the consolidated crds and produce a new swagger.json file that can be processed by openapi-generator
+    """
+    original_definitions = consolidated_crds_object['definitions']
+    flattened_swagger_object = {
+        'definitions': {},
+        'paths': {},
+        'info': {
+            'title': 'Kubernetes',
+            'version': 'unversioned'
+        },
+        'swagger': '2.0'
+    }
+    for root in original_definitions:
+        flattened_swagger_object['definitions'][root] = original_definitions[root]
+
+        queue = []
+
+        # Add in all the initial properties to the queue
+        for prop in original_definitions[root]['properties']:
+            new_path = root + '.' + prop
+            queue.append({
+                new_path: original_definitions[root]['properties'][prop]
+            })
+
+            # Create a new definition so that the properties are pulled out correctly
+            flattened_swagger_object['definitions'][new_path] = original_definitions[root]['properties'][prop]
+
+            # Create a ref from the property (such as spec) to the new path (such as v1alpha1.DevWorkspace.spec)
+            original_definitions[root]['properties'][prop] = {
+                '$ref': create_ref(new_path)
+            }
+
+        # Continue until all properties have been flattened
+        while len(queue) != 0:
+            next_item = queue.pop().popitem()
+            path = next_item[0]
+            new_object = next_item[1]
+            add_definition(flattened_swagger_object['definitions'], path, new_object, queue)
+
+    write_json('swagger.json', flattened_swagger_object)
+
+if __name__ == "__main__":
+    swagger_crds_json = consolidate_crds()
+    flatten(swagger_crds_json)
diff --git a/.github/actions/generate_types/requirements.txt b/.github/actions/generate_types/requirements.txt
new file mode 100644
index 000000000..756d3982b
--- /dev/null
+++ b/.github/actions/generate_types/requirements.txt
@@ -0,0 +1,2 @@
+requests==2.24.0
+PyYAML==5.4.1
diff --git a/.github/workflows/release-typescript-models.yaml b/.github/workflows/release-typescript-models.yaml
new file mode 100644
index 000000000..fa3556e33
--- /dev/null
+++ b/.github/workflows/release-typescript-models.yaml
@@ -0,0 +1,89 @@
+
+# Release a typescript package to npm containing the typescript types generated from the latest merged crds
+name: types
+
+on:
+  push:
+    branches: [ master ]
+
+jobs:
+  release-typescript-models:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout devfile/api
+        uses: actions/checkout@v2
+        with:
+          path: api
+
+      - name: Checkout kubernetes-client/gen
+        uses: actions/checkout@v2
+        with:
+          repository: kubernetes-client/gen
+          path: gen
+          ref: 5c6d90b260fd94af32157f304f971778c899b5e2
+
+      - name: Setup python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9.2'
+
+      - name: Install Python dependencies
+        uses: py-actions/py-dependency-install@v2
+        with:
+          path: "api/.github/actions/generate_types/requirements.txt"
+
+      - name: Generate openapi-generator compatible swagger.json
+        run: |
+          python .github/actions/generate_types/generate.py
+          mkdir -p /tmp/typescript-models
+          mv swagger.json /tmp/typescript-models/swagger.json.unprocessed
+        working-directory: api
+
+      - name: Create empty client-gen configuration
+        run: |
+          {
+            echo 'export KUBERNETES_BRANCH=""'
+            echo 'export CLIENT_VERSION=""'
+            echo 'export PACKAGE_NAME=""'
+            echo 'export USERNAME=""'
+            echo 'export REPOSITORY=""'
+          } >> config.sh
+        working-directory: /tmp
+
+      - name: Generate the typescript models
+        run: |
+          # Remove the contents of custom objects spec so that we aren't bundling any extra objects
+          echo "{}" > custom_objects_spec.json
+          export OPENAPI_SKIP_FETCH_SPEC=true
+          ./typescript.sh /tmp/typescript-models /tmp/config.sh
+        working-directory: gen/openapi
+
+      - name: Modify package.json
+        run: |
+          sed -i 's/\"name\": \".*\"/"name": "@devfile\/api"/g' /tmp/typescript-models/package.json
+          sed -i 's/\"description\": \".*\"/"description": "Typescript types for devfile api"/g' /tmp/typescript-models/package.json
+          sed -i 's/\"repository\": \".*\"/"repository": "devfile\/api"/g' /tmp/typescript-models/package.json
+          sed -i 's/\"license\": \".*\"/"license": "EPL-2.0"/g' /tmp/typescript-models/package.json
+
+      - name: Setup node
+        uses: actions/setup-node@v1
+        with:
+          node-version: 12
+          registry-url: 'https://registry.npmjs.org'
+          scope: '@devfile'
+
+      - name: Install dependencies
+        run: yarn
+        working-directory: /tmp/typescript-models
+
+      - name: Run build
+        run: yarn build
+        working-directory: /tmp/typescript-models
+
+      - name: Release typescript models
+        run: |
+          yarn version --new-version "0.0.1-$(date +%s)"
+          yarn publish --access public
+        env:
+          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
+        working-directory: /tmp/typescript-models
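
For local verification, the generation step can also be run outside of CI. A minimal sketch, assuming a checkout of devfile/api with the crds/ directory at the repository root; it mirrors the workflow steps above, and the resulting swagger.json is what the workflow hands to kubernetes-client/gen's typescript.sh as swagger.json.unprocessed:

    pip install -r .github/actions/generate_types/requirements.txt
    python .github/actions/generate_types/generate.py
    # swagger.json is written to the current working directory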