diff options
| author | Zhineng Li <[email protected]> | 2026-02-12 19:00:33 +0800 |
|---|---|---|
| committer | Zhineng Li <[email protected]> | 2026-02-12 19:00:33 +0800 |
| commit | 328602707213990715fccbb98f46731b19289902 (patch) | |
| tree | dfbcb2455ad96af007c562f28c4055c75edd4bb7 | |
| download | acs-metadata-build-328602707213990715fccbb98f46731b19289902.tar.gz acs-metadata-build-328602707213990715fccbb98f46731b19289902.zip | |
first commit
| -rw-r--r-- | .editorconfig | 22 | ||||
| -rw-r--r-- | .gitignore | 1 | ||||
| -rw-r--r-- | LICENSE | 21 | ||||
| -rw-r--r-- | Makefile | 35 | ||||
| -rw-r--r-- | README | 45 | ||||
| -rw-r--r-- | php-generator/.gitignore | 2 | ||||
| -rw-r--r-- | php-generator/composer.json | 6 | ||||
| -rw-r--r-- | php-generator/generate.php | 78 | ||||
| -rwxr-xr-x | scripts/update-metadata.sh | 58 |
9 files changed, 268 insertions, 0 deletions
diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..bf37ca2 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,22 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true + +[*.sh] +indent_style = space +indent_size = 2 + +[*.php] +indent_style = space +indent_size = 4 + +[Makefile] +indent_style = tab +indent_size = 4 + +[README] +indent_style = tab +indent_size = 8 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..567609b --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +build/ diff --git a/LICENSE b/LICENSE new file mode 100644 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2026 Zhineng Li + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..f7987a8 --- /dev/null +++ b/Makefile @@ -0,0 +1,35 @@ +BUILD_DIR ?= build +TAG ?= $(shell date +%Y%m%d) +JSON_TARBALL := $(BUILD_DIR)/release-$(TAG)-json.tar.gz +PHP_TARBALL := $(BUILD_DIR)/release-$(TAG)-php.tar.gz + +.PHONY: build build-php update clean + +build: build-json build-php + +build-json: update + tar czf "$(JSON_TARBALL)" -C "$(BUILD_DIR)/json" . + shasum -a 256 "$(JSON_TARBALL)" | awk '{print $$1}' > "$(JSON_TARBALL).sha256" + +build-php: update + cd php-generator && composer install --no-dev --optimize-autoloader + mkdir -p "$(BUILD_DIR)/php" + php php-generator/generate.php "$(BUILD_DIR)/json" "$(BUILD_DIR)/php" + tar czf "$(PHP_TARBALL)" -C "$(BUILD_DIR)/php" . + shasum -a 256 "$(PHP_TARBALL)" | awk '{print $$1}' > "$(PHP_TARBALL).sha256" + +update: $(BUILD_DIR)/.metadata.updated + +$(BUILD_DIR)/.metadata.updated: $(BUILD_DIR)/.metadata.en.updated $(BUILD_DIR)/.metadata.zh.updated + touch "$@" + +$(BUILD_DIR)/.metadata.en.updated: + LANGUAGE=EN_US BUILD_DIR="$(BUILD_DIR)/json" ./scripts/update-metadata.sh + touch "$@" + +$(BUILD_DIR)/.metadata.zh.updated: + LANGUAGE=ZH_CN BUILD_DIR="$(BUILD_DIR)/json" ./scripts/update-metadata.sh + touch "$@" + +clean: + rm -rf "$(BUILD_DIR)/" diff --git a/README b/README new file mode 100644 --- /dev/null +++ b/README @@ -0,0 +1,45 @@ +ALIBABA CLOUD API METADATA +========================== + +INTRODUCTION +------------ +This repository provides scripts that generate and maintain Alibaba Cloud +API metadata from the OpenMeta service for API documentation and SDK +generation. + + +BUILD +----- +Metadata is updated regularly to reflect the latest API changes. It is +maintained in two formats: JSON and PHP arrays. + +JSON metadata: `scripts/update-metadata.sh` fetches the latest metadata +from Alibaba Cloud. + +PHP metadata: `php-generator/generate.php` converts JSON metadata into +PHP arrays. + +The Makefile automates the update and build process. 
Run: + + make + +The following artifacts will be generated in the `build/` directory: + +- release-<YYYYMMDD>-json.tar.gz +- release-<YYYYMMDD>-php.tar.gz + +To build only one format, run one of the following commands: + + make build-json + make build-php + +Because Alibaba Cloud provides many services, metadata updates can take +time. Set `CONCURRENCY` to control how many HTTP requests run at the same +time. Use `make -j2` to fetch English and Chinese metadata in parallel: + + CONCURRENCY=8 make -j2 + + +AUTHOR +------ +Zhineng Li <[email protected]> diff --git a/php-generator/.gitignore b/php-generator/.gitignore new file mode 100644 index 0000000..d1502b0 --- /dev/null +++ b/php-generator/.gitignore @@ -0,0 +1,2 @@ +vendor/ +composer.lock diff --git a/php-generator/composer.json b/php-generator/composer.json new file mode 100644 index 0000000..b3f0f41 --- /dev/null +++ b/php-generator/composer.json @@ -0,0 +1,6 @@ +{ + "require": { + "php": "^8.4", + "symfony/var-exporter": "^8.0" + } +} diff --git a/php-generator/generate.php b/php-generator/generate.php new file mode 100644 index 0000000..3f7e574 --- /dev/null +++ b/php-generator/generate.php @@ -0,0 +1,78 @@ +<?php + +declare(strict_types=1); + +use Symfony\Component\VarExporter\VarExporter; + +require __DIR__.'/vendor/autoload.php'; + +// Requires three parameters +// 1. The script name +// 2. The source path for metadata JSON files +// 3. 
The destination path to store the artifact +if ($argc !== 3) { + printf('Usage: php %s <src> <dst>'.PHP_EOL, $argv[0]); + exit(1); +} + +[$src, $dst] = [$argv[1], $argv[2]]; +$srcPath = new \SplFileInfo($src); +$dstPath = new \SplFileInfo($dst); + +// Validate the source path +if ($srcPath->getRealPath() === false) { + printf('The source path "%s" does not exist.'.PHP_EOL, $srcPath->getPathname()); + exit(1); +} + +if ($srcPath->isDir() === false) { + printf('The source path "%s" is not a directory.'.PHP_EOL, $srcPath->getPathname()); + exit(1); +} + +// Validate the destination path +if ($dstPath->getRealPath() === false) { + printf('The destination path "%s" does not exist.'.PHP_EOL, $dstPath->getPathname()); + exit(1); +} + +if ($dstPath->isDir() === false) { + printf('The destination path "%s" is not a directory.'.PHP_EOL, $dstPath->getPathname()); + exit(1); +} + +$iterator = new \RecursiveIteratorIterator(new \RecursiveCallbackFilterIterator( + new \RecursiveDirectoryIterator($srcPath->getRealPath(), \FilesystemIterator::SKIP_DOTS), + static function (\SplFileInfo $file) use ($srcPath): bool { + if ($file->isDir()) { + $relativePath = substr($file->getRealPath(), strlen($srcPath->getRealPath())); + + return str_starts_with($relativePath, DIRECTORY_SEPARATOR.'en_us') + || str_starts_with($relativePath, DIRECTORY_SEPARATOR.'zh_cn'); + } + + return $file->isFile() && $file->getExtension() === 'json'; + } +)); + +foreach ($iterator as $file) { + $relativePath = substr($file->getRealPath(), strlen($srcPath->getRealPath())); + + printf('[-] Generate %s'.PHP_EOL, $relativePath); + + // Change the file extension from json to php + $out = $dstPath->getRealPath().substr($relativePath, 0, -4).'php'; + $outdir = dirname($out); + + if (! 
file_exists($outdir)) { + mkdir($outdir, 0o755, recursive: true); + } + + $contents = file_get_contents($file->getRealPath()); + $decoded = json_decode($contents, associative: true, flags: JSON_THROW_ON_ERROR); + $result = sprintf('<?php return %s;'.PHP_EOL, VarExporter::export($decoded)); + + file_put_contents($out, $result, flags: LOCK_EX); +} + +echo '[-] Generate successfully'.PHP_EOL; diff --git a/scripts/update-metadata.sh b/scripts/update-metadata.sh new file mode 100755 index 0000000..f9ab363 --- /dev/null +++ b/scripts/update-metadata.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash +set -euo pipefail + +BASE_URL="${BASE_URL:-https://api.alibabacloud.com}" +LANGUAGE="${LANGUAGE:-EN_US}" +BUILD_DIR="${BUILD_DIR:-./build}" +CONCURRENCY=${CONCURRENCY:-4} + +NORMALIZED_LANGUAGE=$(echo "${LANGUAGE}" | tr "[:upper:]" "[:lower:]") +TARGET_DIR="${BUILD_DIR%/}/${NORMALIZED_LANGUAGE}" +PRODUCTS_FILE="${TARGET_DIR}/products.json" + +# Let's get started +echo "[-] Update metadata with language ${LANGUAGE}" + +# Reset the directory for legacy product removal +mkdir -p "${BUILD_DIR}" +rm -rf "${TARGET_DIR}" && mkdir -p "${TARGET_DIR}" + +# Update product list +echo "[-] Updating product list" +curl -sSf "${BASE_URL}/meta/v1/products.json?language=${LANGUAGE}" -o "$PRODUCTS_FILE" + +# Update all available APIs +counter=0 +while read -r product; do + # Extract the product code + code=$(jq -r '.code' <<< "$product") + code_lowercase=$(tr "[:upper:]" "[:lower:]" <<< "$code") + + # Loop through each version for the current product + while read -r version; do + api_docs_url="${BASE_URL}/meta/v1/products/${code}/versions/${version}/api-docs.json?language=${LANGUAGE}" + + version_dir="${TARGET_DIR}/${code_lowercase}/${version}" + api_docs_path="${version_dir}/api-docs.json" + + # Create the directory if it doesn't exist + mkdir -p "$version_dir" + + echo "[-] Updating ${code} ${version}" + curl -sSf "${api_docs_url}" -o "$api_docs_path" & + + # Increment the counter + ((++counter)) + + 
# If the counter reaches the maximum number of jobs, wait for them + # to finish before continuing + if ((counter >= CONCURRENCY)); then + wait + counter=0 + fi + done < <(jq -r '.versions[]' <<< "$product") +done < <(jq -c '.[]' "$PRODUCTS_FILE") + +# Wait for all background processes to finish before exiting +wait +echo "[-] Update done" |
