From 223353ff146faff7f9e51f9d8e1ac5fdcd59ee7b Mon Sep 17 00:00:00 2001 From: Twisha Bansal Date: Wed, 15 Apr 2026 11:14:31 +0530 Subject: [PATCH 1/4] feat: add support for skills --- .../workflows/skills-validate-fallback.yml | 37 +++ .github/workflows/skills-validate.yml | 56 ++++ .gitignore | 1 + gemini-extension.json | 18 -- skills-gen/db_groups.md | 253 ++++++++++++++++++ skills-gen/skill-support.md | 74 +++++ skills/cloud-sql-mysql-admin/SKILL.md | 125 +++++++++ .../scripts/create_database.js | 105 ++++++++ .../scripts/create_instance.js | 105 ++++++++ .../scripts/create_user.js | 105 ++++++++ .../scripts/get_instance.js | 105 ++++++++ .../scripts/list_databases.js | 105 ++++++++ .../scripts/list_instances.js | 105 ++++++++ .../scripts/wait_for_operation.js | 105 ++++++++ skills/cloud-sql-mysql-data/SKILL.md | 75 ++++++ .../scripts/execute_sql.js | 105 ++++++++ .../scripts/get_query_plan.js | 105 ++++++++ .../scripts/list_active_queries.js | 105 ++++++++ .../scripts/list_tables.js | 105 ++++++++ skills/cloud-sql-mysql-lifecycle/SKILL.md | 113 ++++++++ .../scripts/clone_instance.js | 105 ++++++++ .../scripts/create_backup.js | 105 ++++++++ .../scripts/get_instance.js | 105 ++++++++ .../scripts/list_instances.js | 105 ++++++++ .../scripts/restore_backup.js | 105 ++++++++ .../scripts/wait_for_operation.js | 105 ++++++++ skills/cloud-sql-mysql-monitor/SKILL.md | 203 ++++++++++++++ .../scripts/get_query_metrics.js | 105 ++++++++ .../scripts/get_query_plan.js | 105 ++++++++ .../scripts/get_system_metrics.js | 105 ++++++++ .../scripts/list_active_queries.js | 105 ++++++++ .../scripts/list_table_fragmentation.js | 105 ++++++++ .../scripts/list_table_stats.js | 105 ++++++++ .../list_tables_missing_unique_indexes.js | 105 ++++++++ 34 files changed, 3457 insertions(+), 18 deletions(-) create mode 100644 .github/workflows/skills-validate-fallback.yml create mode 100644 .github/workflows/skills-validate.yml create mode 100644 .gitignore create mode 100644 
skills-gen/db_groups.md create mode 100644 skills-gen/skill-support.md create mode 100644 skills/cloud-sql-mysql-admin/SKILL.md create mode 100755 skills/cloud-sql-mysql-admin/scripts/create_database.js create mode 100755 skills/cloud-sql-mysql-admin/scripts/create_instance.js create mode 100755 skills/cloud-sql-mysql-admin/scripts/create_user.js create mode 100755 skills/cloud-sql-mysql-admin/scripts/get_instance.js create mode 100755 skills/cloud-sql-mysql-admin/scripts/list_databases.js create mode 100755 skills/cloud-sql-mysql-admin/scripts/list_instances.js create mode 100755 skills/cloud-sql-mysql-admin/scripts/wait_for_operation.js create mode 100644 skills/cloud-sql-mysql-data/SKILL.md create mode 100755 skills/cloud-sql-mysql-data/scripts/execute_sql.js create mode 100755 skills/cloud-sql-mysql-data/scripts/get_query_plan.js create mode 100755 skills/cloud-sql-mysql-data/scripts/list_active_queries.js create mode 100755 skills/cloud-sql-mysql-data/scripts/list_tables.js create mode 100644 skills/cloud-sql-mysql-lifecycle/SKILL.md create mode 100755 skills/cloud-sql-mysql-lifecycle/scripts/clone_instance.js create mode 100755 skills/cloud-sql-mysql-lifecycle/scripts/create_backup.js create mode 100755 skills/cloud-sql-mysql-lifecycle/scripts/get_instance.js create mode 100755 skills/cloud-sql-mysql-lifecycle/scripts/list_instances.js create mode 100755 skills/cloud-sql-mysql-lifecycle/scripts/restore_backup.js create mode 100755 skills/cloud-sql-mysql-lifecycle/scripts/wait_for_operation.js create mode 100644 skills/cloud-sql-mysql-monitor/SKILL.md create mode 100755 skills/cloud-sql-mysql-monitor/scripts/get_query_metrics.js create mode 100755 skills/cloud-sql-mysql-monitor/scripts/get_query_plan.js create mode 100755 skills/cloud-sql-mysql-monitor/scripts/get_system_metrics.js create mode 100755 skills/cloud-sql-mysql-monitor/scripts/list_active_queries.js create mode 100755 skills/cloud-sql-mysql-monitor/scripts/list_table_fragmentation.js create mode 
100755 skills/cloud-sql-mysql-monitor/scripts/list_table_stats.js create mode 100755 skills/cloud-sql-mysql-monitor/scripts/list_tables_missing_unique_indexes.js diff --git a/.github/workflows/skills-validate-fallback.yml b/.github/workflows/skills-validate-fallback.yml new file mode 100644 index 0000000..b49eb62 --- /dev/null +++ b/.github/workflows/skills-validate-fallback.yml @@ -0,0 +1,37 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: lint + +on: + push: + paths-ignore: + - "skills/**" + pull_request: + paths-ignore: + - "skills/**" + pull_request_target: + types: [labeled] + paths-ignore: + - "skills/**" + workflow_dispatch: + +jobs: + skills-validate: + runs-on: ubuntu-latest + steps: + - name: Skip Skill Validation + run: | + echo "No changes detected in 'skills/' directory. Skipping validation." + echo "This job ensures the required 'skills-validate' status check passes." diff --git a/.github/workflows/skills-validate.yml b/.github/workflows/skills-validate.yml new file mode 100644 index 0000000..becb9b4 --- /dev/null +++ b/.github/workflows/skills-validate.yml @@ -0,0 +1,56 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Validate Skills + +on: + push: + paths: + - "skills/**" + pull_request: + paths: + - "skills/**" + pull_request_target: + types: [labeled] + paths: + - "skills/**" + +jobs: + skills-validate: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 + + - name: Set up Python + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 + with: + python-version: "3.11" + + - name: Install skills-ref + run: | + pip install "git+https://github.com/agentskills/agentskills.git#subdirectory=skills-ref" + + - name: Validate Skills + run: | + failed=0 + for skill_dir in skills/*/; do + if [ -d "$skill_dir" ]; then + echo "Validating $skill_dir..." + if ! 
skills-ref validate "$skill_dir"; then + echo "Validation failed for $skill_dir" + failed=1 + fi + fi + done + exit $failed diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4f20322 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +skills-gen/creds.sh \ No newline at end of file diff --git a/gemini-extension.json b/gemini-extension.json index c46401a..2221178 100644 --- a/gemini-extension.json +++ b/gemini-extension.json @@ -2,24 +2,6 @@ "name": "cloud-sql-mysql", "version": "0.1.9", "description": "Connect and interact with a Cloud SQL for MySQL database and data", - "mcpServers": { - "cloud_sql_mysql_admin": { - "command": "${extensionPath}${/}toolbox", - "args": [ - "--prebuilt", - "cloud-sql-mysql-admin", - "--stdio" - ] - }, - "cloud_sql_mysql": { - "command": "${extensionPath}${/}toolbox", - "args": [ - "--prebuilt", - "cloud-sql-mysql", - "--stdio" - ] - } - }, "contextFileName": "CLOUD-SQL-MYSQL.md", "settings": [ { diff --git a/skills-gen/db_groups.md b/skills-gen/db_groups.md new file mode 100644 index 0000000..379b677 --- /dev/null +++ b/skills-gen/db_groups.md @@ -0,0 +1,253 @@ +# Refactoring Database Toolsets in MCP Toolbox + +## **Overview** + +Current telemetry indicates that exposing agents to more than [\~20 tools leads to a collapse in reasoning accuracy (\<40%)](https://docs.google.com/document/d/1gg47e4qcXJlZ2Zd1uOl5LPtrevGP7p1RTealXk288k0/edit?tab=t.0). To align with the [**MCP Toolbox Style Guide**](https://docs.google.com/document/d/1M_W98KfCt_mfM0vJjCkd1wU9fn6EcluWhtY0qKOAHxQ/edit?resourcekey=0-21OnBgfslm3l4NJfagLveA&tab=t.0), we must refactor our "monolithic" prebuilt values into sets of **5–8 tools** organized by **Critical User Journey**. 
+ +### **Current Toolset Size Analysis** + +| Database Source | Current Tool Count | Status | Primary Reason for Bloat | +| :-------------------------------------- | :----------------- | :------------- | :---------------------------------------------------------------------------------------------------------------------------------------- | +| **Looker** | 33 | 🔴 Critical | Includes full SDK (Dev, Admin, and Query). | +| **AlloyDB Omni** | 31 | 🔴 Critical | High overlap with Postgres \+ Columnar specific tools. | +| **AlloyDB Postgres** | 29 | 🔴 Critical | Mixed Data, Maintenance, and Admin tools. | +| **AlloyDB Admin** | 10 | 🟡 High | Combined Cluster, Instance, and User management. | +| **Cloud SQL for PostgreSQL (Postgres)** | 29 | 🔴 Critical | Monolithic engine for data and maintenance. | +| **Cloud SQL Postgres Admin** | 11 | 🟡 High | Combined Instance and User management. | +| **BigQuery** | 10 | 🟡 High | Mixes Metadata discovery with ML Analytics. | +| **Cloud SQL MySQL Admin** | 10 | 🟡 High | Combined Instance and User management. | +| **Cloud SQL for MySQL** | 6 | ✅ Optimal | Mixes Admin lifecycle with Data exploration. | +| **Cloud SQL SQL Server Admin** | 10 | 🟡 High | Combined Instance and User management. | +| **Cloud SQL SQL Server** | 2 | ✅ Optimal | | +| **Observability (AlloyDB, Cloud SQL)** | 2 | ✅ Optimal | | +| **Healthcare API** | 15 | 🟡 High→ 👌 OK | Combined FHIR and DICOM protocols. Uses toolsets: cloud_healthcare_dataset_tools cloud_healthcare_fhir_tools cloud_healthcare_dicom_tools | +| **Firestore** | 9 | 🟡 High | Mixes Data operations with Rules management. | +| **Spanner** | 4 | ✅ Optimal | | +| **Dataplex** | 3 | ✅ Optimal | | + +### **Context** + +This doc will propose organizing prebuilt tools into discrete tool sets optimized for use cases. This can be exposed in Toolbox in a couple ways 1\) a new prebuilt toolset i.e. ./toolbox \--prebuilt alloydb-postgres-new-name or 2\) a named toolset within a prebuilt toolset i.e. 
./toolbox \--prebuilt alloydb-postgres and use MCP server endpoint /mcp/{toolset_name}. Tools can be in multiple tool sets in order to have coverage of use case tasks. + +Note: please see the [Dictionary](?tab=t.dxjmda6q2112) and [Additional Context](?tab=t.dxjmda6q2112) for more background information. + +Related work includes the transition from [**Toolsets** to **Groups**](https://docs.google.com/document/d/1KUw2F1_kuHffsB2RGau6Ol0puKnMh_GMF3uO8UUKwh0/edit?resourcekey=0-ixAkamQ_UUvLPg1yzcyZ7w&tab=t.0), a strategic architectural shift designed to create a unified collection for all MCP primitives—specifically integrating **tools, resources, and prompts**—into a single logical entity. Groupings allow for logical organization of functionality (e.g. email, calendar, etc.). They also enable client-side filtering \- allowing the client or user to select only the relevant functionality for a specific task. This reduces context overload and minimizes the number of tokens sent to the LLM. +This evolution directly supports the generation of **Agent Skills**, as skills are currently produced on a per-toolset (now per-group) basis. By using the new description field in a Group definition, Toolbox can automatically populate the server instructions for the MCP server and the \--description flag required to define a skill's purpose and strategy in its SKILL.md file. +I recommend approach 2\. Users can still get all tools in the default toolset but can limit by toolset for better performance. 
+ +| Topic / Concept | \#1 New Prebuilt Flags (--prebuilt alloydb-dbadmin) | \#2 Named Toolsets within Prebuilt (/mcp/{toolset_name}) | +| :------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **Discoverability** | Visibility in documentation and \--help menus. | Visible in logs and UI. Requires users to know the specific MCP endpoint path or consult external API docs. Currently no mechanism to list available toolsets via the MCP endpoint. | +| **Granularity/Complexity** | Each flag represents a "bundled" identity. Can mix-and-match flags. **Flat:** Avoids hierarchical confusion but leads to "flag explosion" as more specialized use cases are added. | Allows a single server to host multiple logical groupings (e.g., read-only, schema-mgmt, data-ops). **Hierarchical:** Supports the MCP "Primitive Grouping" standard for organized, named collections within a single server. | +| **User Experience (UX)** | **Simple & Explicit:** One command yields one specific set of tools. Very "Unix-style" and predictable. | **Flexible but Complex:** The default endpoint might provide "everything," while named endpoints provide subsets. Could be confusing to debug. | +| **System Overhead** | **Higher:** Requires separate process instances or flag-parsing logic for every "new" prebuilt name added. | **Lower:** A single running MCP server can multiplex multiple toolsets via routing, saving resources. | +| **Tool Overlap** | Can lead to code duplication or complex symlinking in the backend to ensure a tool exists in two "flags." 
| Naturally supports overlap; the server logic simply maps the same function to multiple endpoint aliases. | +| **Skill Generation** | **Manual-ish:** Requires a specific flag per persona; easier to map one "Flag" to one "Skill Folder". | **Automated:** One prebuilt server can dynamically export multiple Groups as different Skills via its internal registry. | +| **Backward Compatibility** | **Low:** Requires breaking changes of toolsets | **Strong:** Does not impact existing MCP client-server URL structures. | + +This doc will also provide a comparison to the current OneMCP tool sets in: [Data Cloud OneMCP: Tools and Commitment Dashboard](https://docs.google.com/spreadsheets/d/1rXWhXONd5xqCpU-oJJ8eTSWtL4QVm9yuavjur-p_NXY/edit?gid=1286305679#gid=1286305679) + +### **Supporting Toolsets for STDIO** + +We will need a flag like `./toolbox --prebuilt alloydb-postgres/ops` where alloydb-postgres is the tool-source name and ops is the toolset name. + +## **Recommendation for Toolsets and Tool Names by Source** + +### **AlloyDB for PostgreSQL** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :---------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **alloydb-postgres-admin** | create_cluster get_cluster list_clusters create_instance get_instance list_instances database_overview wait_for_operation | 8 | Use these tools when you need to provision new AlloyDB clusters and instances, monitor their creation status, and retrieve high-level configuration or health data for the environment. 
| +| **access-management** | create_user, list_users, get_user, list_roles, list_pg_settings, database_overview | 6 | Use these tools when you need to manage database users, inspect permissions and roles, and verify global configuration parameters related to security and access control. | +| **alloydb-postgres-data** | execute_sql, list_tables, list_views, list_schemas, list_triggers, list_indexes, list_sequences, list_stored_procedure | 8 | Use these tools when you need to explore the database schema, identify objects like views and triggers, and execute custom SQL queries to interact with your data. | +| **alloydb-postgres-monitor** | list_active_queries, list_query_stats, get_query_plan, get_query_metrics, get_system_metrics, long_running_transactions, list_locks, list_database_stats | 7 | Use these tools when you need to troubleshoot slow performance, analyze query execution plans, identify resource-heavy processes, and monitor system-level PromQL metrics. | +| **alloydb-postgres-health** | list_top_bloated_tables, list_invalid_indexes, list_table_stats, get_column_cardinality, list_autovacuum_configurations, list_tablespaces, database_overview, get_instance | 6 | Use these tools when you need to optimize storage, identify index issues, analyze table statistics, or manage autovacuum and tablespace configurations to maintain peak database health. | +| **alloydb-postgres-optimize** | list_available_extensions, list_installed_extensions, list_memory_configurations, list_pg_settings, database_overview, get_cluster | 8 | Use these tools when you need to discover and manage PostgreSQL extensions or fine-tune engine-level settings such as memory allocation and server configuration parameters. 
| +| **replication** | replication_stats, list_replication_slots, list_publication_tables, list_instances, get_instance, database_overview | 6 | Use these tools when you need to monitor replication health, manage sync states between nodes, and ensure the high availability and data distribution of your AlloyDB cluster. | + +**OneMCP Comparison:** + +- **Unique to MCP Toolbox:** Deep columnar recommendations (list_columnar_recommended_columns) and maintenance insights. +- **Missing in Toolbox:** delete_instance, update_instance, clone_cluster, export_data, import_data. +- **Alignment:** Both support core cluster and instance CRUD. + +### **AlloyDB Omni** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :-------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **data** | execute_sql, list_tables, list_views, list_schemas, list_triggers, list_indexes, list_sequences, list_stored_procedure | 8 | Use these tools when you need to explore the database structure, identify schema objects like views and triggers, and execute SQL queries to interact with your data. | +| **performance** | execute_sql, get_query_plan, list_query_stats, get_column_cardinality, list_table_stats, list_database_stats, list_active_queries | 7 | Use these tools when you need to analyze query performance, generate execution plans, check table/column statistics, and monitor overall database activity. 
| +| **monitor** | database_overview, list_active_queries, long_running_transactions, list_locks, list_database_stats, list_pg_settings | 7 | Use these tools when you need to troubleshoot production issues by identifying locks, tracking long-running transactions, and getting a high-level view of server state. | +| **optimize** | list_pg_settings, list_memory_configurations, list_available_extensions, list_installed_extensions, list_autovacuum_configurations, list_columnar_configurations, list_columnar_recommended_columns | 7 | Use these tools when you need to fine-tune the database engine settings, manage extensions, or optimize the columnar engine for better analytical performance. | +| **health** | list_top_bloated_tables, list_invalid_indexes, list_table_stats, list_tablespaces, database_overview, list_autovacuum_configurations | 6 | Use these tools when you need to audit database health, identify storage bloat, find broken indexes, and verify tablespace or maintenance configurations. | +| **replication** | replication_stats, list_replication_slots, list_publication_tables, database_overview | 4 | Use these tools when you need to monitor the health of database replication, manage sync states between nodes, and audit publication tables for distributed setups. | +| **access-control** | list_roles, list_pg_settings, database_overview | 3 | Use these tools when you need to manage user roles, inspect permissions, and verify security-related configuration parameters. 
| + +**OneMCP Comparison:** OneMCP cannot support Omni + +### **BigQuery** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :--------------------- | :----------------------------------------------------------------------------------------- | :---- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **bigquery-data** | execute_sql list_dataset_ids list_table_ids get_dataset_info get_table_info search_catalog | 6 | Use these tools when you need to handle large-scale data exploration and dataset management. Use when users need to find data assets or run SQL at scale. Provides metadata discovery and query execution across the data warehouse. | +| **bigquery-analytics** | analyze_contribution ask_data_insights forecast search_catalog | 4 | Use these tools when you need to handle advanced data intelligence and predictive tasks. Use when a user asks "why" data changed or needs future projections. Provides automated insight generation and time-series forecasting. | + +**OneMCP Comparison:** + +- **Unique to MCP Toolbox:** Advanced analysis tools (analyze_contribution, forecast, search_catalog). +- **Parity:** High overlap on core metadata (list_dataset_ids, list_table_ids, get_table_info). 
+ +### **Cloud SQL PostgreSQL & Standalone** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :-------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **admin** | create_instance, get_instance, list_instances, create_database, list_databases, create_user, wait_for_operation, clone_instance | 7 | Use these tools when you need to provision new Cloud SQL instances, create databases and users, clone existing environments, and monitor the progress of long-running operations. | +| **lifecycle** | create_backup, restore_backup, postgres_upgrade_precheck, wait_for_operation, database_overview, get_instance, list_instances | 8 | Use these tools when you need to manage the lifecycle of your instances, including performing backups and restores, checking major version upgrade compatibility, and monitoring overall instance status. | +| **data** | execute_sql, list_tables, list_views, list_schemas, list_triggers, list_indexes, list_sequences, list_stored_procedure | 7 | Use these tools when you need to explore the database structure, discover schema objects like views or stored procedures, and execute custom SQL queries to interact with your data. | +| **monitor** | get_system_metrics, get_query_metrics, list_query_stats, get_query_plan, list_database_stats, list_active_queries, long_running_transactions, list_locks | 6 | Use these tools when you need to troubleshoot performance bottlenecks, analyze query execution plans, identify resource-heavy processes, and monitor system-level PromQL metrics. 
| +| **health** | list_top_bloated_tables, list_invalid_indexes, list_table_stats, get_column_cardinality, list_autovacuum_configurations, list_tablespaces, database_overview, list_pg_settings | | Use these tools when you need to audit database health, identify storage bloat, find invalid indexes, analyze table statistics, and manage maintenance configurations like autovacuum. | +| **view-config** | list_available_extensions, list_installed_extensions, list_memory_configurations, list_pg_settings, database_overview, get_instance | | Use these tools when you need to discover and manage PostgreSQL extensions or fine-tune engine-level settings such as memory allocation and server configuration parameters. | +| **replication** | replication_stats, list_replication_slots, list_publication_tables, list_roles, list_pg_settings, database_overview | | Use these tools when you need to monitor replication health, manage sync states between nodes, and audit database roles and security settings to ensure environment integrity. | + +**OneMCP Comparison:** + +- **Unique to MCP Toolbox:** Extension management (list_available_extensions) and query plan analysis. +- **Missing in Toolbox:** PostgreSQL lifecycle management (delete_instance, update_instance). 
+ +### **Cloud SQL MySQL & Standalone** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :-------------------- | :--------------------------------------------------------------------------------------------------------------------------------------- | :---- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **admin** | create_instance get_instance list_instances create_database list_databases create_user wait_for_operation | 7 | Use these tools when you need to provision new Cloud SQL for MySQL instances, create databases and users, clone existing environments, and monitor the progress of infrastructure operations. | +| **data** | execute_sql, list_tables, get_query_plan, list_active_queries | 4 | Use these tools when you need to explore your database schema, execute SQL queries to interact with your data, and inspect how MySQL plans to execute your statements. | +| **monitor** | get_query_plan, list_active_queries, get_query_metrics, get_system_metrics, list_table_fragmentation, list_tables_missing_unique_indexes | 6 | Use these tools when you need to troubleshoot slow queries, analyze system-level PromQL metrics, and identify structural performance issues like table fragmentation or missing unique indexes. | +| **lifecycle** | create_backup restore_backup clone_instance list_instances wait_for_operation | 5 | Use these tools when you need to manage the durability and safety of your data by creating backups, restoring from previous states, or cloning instances for recovery and testing. | + +**OneMCP Comparison:** + +- **Unique to MCP Toolbox:** Maintenance and observability insights (list_table_fragmentation, get_query_plan). +- **Unique to OneMCP:** Instance lifecycle (delete_instance, update_instance), import_data, export_data, and user management (delete_user). 
+ +### **Cloud SQL SQL Server & Standalone** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :-------------------- | :-------------------------------------------------------------------------------------------------------- | :---- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **admin** | create_instance get_instance list_instances create_database list_databases create_user wait_for_operation | 7 | Use these tools when you need to provision new Cloud SQL for SQL Server instances, create databases and users, clone existing environments, and monitor the progress of long-running operations. | +| **data** | execute_sql list_tables | 2 | Use these tools when you need to explore the database schema, execute SQL queries to interact with your data, and monitor system-level performance metrics using PromQL queries. | +| **monitor** | get_system_metrics | 1 | Use these tools when you need to troubleshoot slow queries and analyze system-level PromQL metrics. | +| **lifecycle** | create_backup restore_backup clone_instance list_instances wait_for_operation | 5 | Use these tools when you need to manage the lifecycle and durability of your data, including creating backups, restoring from existing backups, and cloning instances for testing or migration. | + +**OneMCP Comparison:** + +- **Parity:** Core execution and administrative tools are aligned. 
+ +### **Looker & Conversational Analytics** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :-------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **looker-modeling** | get_models get_explores get_dimensions get_measures get_filters get_parameters | 6 | Handles LookML semantic layer discovery. Use when the user needs to understand what data fields are available for analysis. Provides detailed exploration of dimensions, measures, and model structures. | +| **looker-content** | get_looks run_look make_look get_dashboards run_dashboard make_dashboard add_dashboard_element add_dashboard_filter | 8 | Manages user-facing BI assets like Looks and Dashboards. Use for creating, searching, or executing saved visualizations. Provides full lifecycle management for reporting content. | +| **looker-dev** | get_projects get_project_files get_project_file create_project_file update_project_file delete_project_file validate_project dev_mode | 8 | Focused on the developer workflow and LookML file management. Use for code changes, validation, and project exploration. Provides file-level CRUD operations and syntax checking. | +| **looker-ops** | health_pulse health_analyze health_vacuum get_connections get_connection_schemas get_connection_databases get_connection_tables get_connection_table_columns | 8 | Handles platform maintenance and database connection audits. Use for instance health checks or database schema discovery. Provides connectivity management and LookML cleanup suggestions. 
| + +**OneMCP Comparison:** Looker is unsupported by OneMCP due to no OP API + +### + +### **Firestore** + +| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | +| :--------------------- | :--------------------------------------------------------------------------------------------- | :---- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| **firestore-data** | get_documents add_documents update_document delete_documents query_collection list_collections | 6 | Handles NoSQL document operations and collection hierarchy exploration. Use for CRUD tasks and data retrieval. Provides flexible document manipulation and structured querying. | +| **firestore-security** | get_rules validate_rules | 2 | Manages access control and security compliance. Use when auditing permissions or deploying new security logic. Provides rule retrieval and syntax validation. | + +**OneMCP Comparison:** + +- **Unique to OneMCP:** Field-level management (field_get, field_update), backup management (backup_get, backup_delete), and schema/insights tools. +- **Missing in Toolbox:** database creation, import_data, export_data, and backup_schedule management. + +### **Healthcare API** + +No changes required. Already use toolsets. + +**OneMCP Comparison:** + +- OneMCP does not currently list a specific Healthcare API toolset. MCP Toolbox provides a specialized competitive advantage here. 
+ +### **Spanner** + +_Includes: GoogleSQL and PostgreSQL dialects._ + +| Proposed Toolset | Recommended Tool Names | +| :------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------- | +| **spanner-data** (No changes required) | list_tables list_graphs execute_sql execute_dql_sql | +| **spanner-admin** (Future/Unplanned) | create_instance get_instance update_instance delete_instance list_instances create_database get_database update_schema drop_database get_operation_status | + +**OneMCP Comparison:** + +- **Unique to OneMCP:** Session management (create_session, commit). +- **Missing in Toolbox:** Admin/Lifecycle tools + +### **Dataplex** + +| Proposed Toolset | Recommended Tool Names | +| :------------------------------------------- | :------------------------------------------------------------------ | +| **dataplex-discovery** (No changes required) | search_entries lookup_entry search_aspect_types | +| **dataplex-quality** (Coming soon) | get_data_profile get_data_quality run_profile_scan run_quality_scan | + +**OneMCP Comparison:** + +- **Unique to OneMCP:** get_lineage_graph + +## **Additional Work** + +### **GitHub PR Check** + +We should add a test or a Gemini CLI review for keeping track of toolset sizes. + +### **Versioning Policy** + +We need to write a versioning guide for toolsets to answer questions like are toolsets changes breaking changes? 
+ +## **Appendix** + +### **3P Toolsets** + +The following toolsets are already compliant with size limits and require no structural changes: + +- **ClickHouse**: execute_sql, list_databases, list_tables +- **Elasticsearch**: execute_query +- **Neo4j**: execute_cypher, get_schema +- **Spark**: list_batches, get_batch, cancel_batch, create_pyspark_batch, create_spark_batch +- **SQLite**: execute_sql, list_tables +- **SingleStore / Snowflake / MindsDB / OceanBase**: execute_sql, list_tables + +| Current Name | Recommended Change | Reason | +| :------------------ | :------------------------ | :------------------------------- | +| mindsdb-execute-sql | execute_sql | Remove redundant prefix. | +| mindsdb-sql | execute_parameterized_sql | Remove prefix; increase clarity. | +| execute_esql_query | execute_query | Remove engine-specific acronyms. | +| fhir_patient_search | search_patients | Outcome-oriented naming. | + +### **No MCP Toolbox Support** + +#### **Dataform OneMCP** + +MCP Toolbox currently supports dataform-compile. + +| Proposed Toolset | Recommended Tool Names | +| :----------------- | :----------------------------------------------------------------------------------- | +| **dataform-repo** | create_repository, delete_repository, list_repositories | +| **dataform-files** | search_file, rename_file, write_file, delete_file, read_file, list_files_and_folders | +| **dataform-ops** | compile_pipeline, trigger_pipeline | + +#### **Bigtable OneMCP** + +MCP Toolbox currently supports bigtable-sql. 
+ +| Proposed Toolset | Recommended Tool Names | +| :---------------- | :-------------------------------------------------------------------------- | +| **bigtable-data** | list_instances, get_instance_info, list_tables, get_table_info, execute_sql | + +[image1]: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABsAAAAbCAMAAAC6CgRnAAADAFBMVEX///+5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+bLcuzYlvHcofPpxffu0fnGZuvTiu/hrfT79P7///+9T+jlufb26PzPfu7CW+ny3Prn2uy+msydaLKVW6t8NZjfzeb38/mMTqWEQp7OtNmldLitgb/WwN+DN6KLOauaPL+pQNKuQNiePcSHOKd/Np2mP87Gp9KxQdy1QuG2jsWiPsnx5/Xv5vKTOrXt0Pjq0PTmz+/bwuXXtuO2Td7q2/HizurFnNW+kNCoa8CWO7qPOrG4WN3NqdoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACsRY1SAAAAEHRSTlMAEECAoMDQMFCwcJDg8GAggbhKsQAAANBJREFUeF6NkjkOAjEMRR2GYYkbGs7B/S9Dj9yEYmiQELG/s8wMAn6RxHnyEjuBqk7YUr0IcwA5djZDBRpbEJVSZR+QwU1vh7Gkh9ncIraHmykzR05UTlOJmRFzO5u2vmckdKapmlkBIaNHcoqozS+Od6KZL/JZ6U/LI8KlW2AXWymCstuIeQRjJJpo17GbLodBzKgvBbvq8xYk58M4WEtU0qHSFzGHnlCbA6+IMgSV2s2ipLUka5t25DX4/d5m+23uv/4LrShq+ON/qhw7yHoD2LYrfWJ+HeEAAAAASUVORK5CYII= diff --git a/skills-gen/skill-support.md b/skills-gen/skill-support.md new file mode 100644 index 0000000..8a8f723 --- /dev/null +++ b/skills-gen/skill-support.md @@ -0,0 +1,74 @@ +You're a senior software engineer who's working to support skills in extensions for Google Data 
Cloud. + +To support that create the following PRs in your repo. For each PR, first make the desired changes and then create a PR in the github repo. Please ensure that the PRs have good branch names as well as PR titles and descriptions. Directly create PRs in the repo. DO NOT CREATE ANY FORKS. All PRs should be created in a draft state. + +# PR1: Add support for skills + +1. Generate new skills using the latest toolbox binary (v1.1.0). Toolbox installation command: + +```bash +export VERSION=1.1.0 +curl -L -o toolbox2 https://storage.googleapis.com/mcp-toolbox-for-databases/v$VERSION/darwin/arm64/toolbox +chmod +x toolbox2 +``` + +Then set the required env vars for configuration. You can +find those in the configuration section in the readme file. Eg. https://github.com/gemini-cli-extensions/alloydb?tab=readme-ov-file#configuration. + +Command example to generate skills for a toolset: + +```bash +./toolbox2 --prebuilt cloud-sql-postgres skills-generate --name "cloud-sql-postgres-health" --description "Use these skills when you need to audit database health, identify storage bloat, find invalid indexes, analyze table statistics, and manage maintenance configurations like autovacuum." --toolset=monitor --license-header "// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the \"License\"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an \"AS IS\" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License." --additional-notes="Note: The scripts automatically load the environment variables from various .env files. 
Do not ask the user to set vars unless skill execution fails due to env var absence." +``` + +Write the commands for all tools in the generate-skills-repo.sh script. Do not commit this file. + +Remember to generate skills for all toolsets mentioned in https://github.com/googleapis/mcp-toolbox/tree/main/internal/prebuiltconfigs/tools for the source. E.g., AlloyDB Source: https://github.com/googleapis/mcp-toolbox/blob/main/internal/prebuiltconfigs/tools/alloydb-postgres.yaml. + +Find the list of sources and their tool groupings, along with toolset descriptions, [here](./db_groups.md). + +After skills generation, cross-check the generated skills against the [source yaml files](https://github.com/googleapis/mcp-toolbox/tree/main/internal/prebuiltconfigs/tools) and the [document](./db_groups.md) provided above. + +Now, for all generated skills, update the skill descriptions to refer to skills instead of tools. + +2. Add skills validation workflow. E.g.: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/.github/workflows/skills-validate.yml and https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/.github/workflows/skills-validate-fallback.yml. + +3. Remove the MCP servers from the gemini-extension.json file. + +# PR2: Remove packaging workflow + +1. Remove the GitHub package and upload assets workflow: .github/workflows/package-and-upload-assets.yml. + +# PR3: Add Claude Code plugin config + +1. Replicate this PR: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/137. Ensure that the plugin.json user config is consistent with the env vars in the gemini-extension.json file. + +# PR4: Add Codex plugin config + +1. Replicate this PR: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/138. 
Ensure that the system prompt you're using here is consistent with the gemini-extension context file. + +# PR5: Auto-update plugin versions using release-please + +1. Replicate this PR: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/155. Also ensure that "README.md" is present in the extra-files section for release-please. + +# PR6: Make skill docs changes + +1. Find the context file in gemini-extension.json, e.g., CLOUD-SQL-POSTGRESQL.md for the Cloud SQL repo: https://github.com/gemini-cli-extensions/cloud-sql-postgresql and make changes analogous to what was done in https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/109/. + +2. Make changes to the README.md and DEVELOPER.md files. Use https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/DEVELOPER.md and https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/README.md for potential changes. + +3. Check for any "tools" mentions in the repository that need to be updated to skills. + +4. Ensure that you have covered updates to the README on how to use the extensions with Gemini CLI, Claude Code, Codex and Antigravity. Be very thorough when looking into this: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/README.md and making updates. Notice that this README has a table of contents and collapsible sections for Claude Code, etc. diff --git a/skills/cloud-sql-mysql-admin/SKILL.md b/skills/cloud-sql-mysql-admin/SKILL.md new file mode 100644 index 0000000..6f1624e --- /dev/null +++ b/skills/cloud-sql-mysql-admin/SKILL.md @@ -0,0 +1,125 @@ +--- +name: cloud-sql-mysql-admin +description: Use these skills when you need to provision new Cloud SQL for MySQL instances, create databases and users, clone existing environments, and monitor the progress of infrastructure operations. +--- + +## Usage + +All scripts can be executed using Node.js. Replace `` and `` with actual values. 
+ +**Bash:** +`node /scripts/.js '{"": ""}'` + +**PowerShell:** +`node /scripts/.js '{\"\": \"\"}'` + +Note: The scripts automatically load the environment variables from various .env files. Do not ask the user to set vars unless skill execution fails due to env var absence. + + +## Scripts + + +### create_database + + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| instance | string | The ID of the instance where the database will be created. | Yes | | +| name | string | The name for the new database. Must be unique within the instance. | Yes | | + + +--- + +### create_instance + + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| name | string | The name of the instance | Yes | | +| databaseVersion | string | The database version for MySQL. If not specified, defaults to the latest available version (e.g., MYSQL_8_4). | No | `MYSQL_8_4` | +| rootPassword | string | The root password for the instance | Yes | | +| editionPreset | string | The edition of the instance. Can be `Production` or `Development`. This determines the default machine type and availability. Defaults to `Development`. | No | `Development` | + + +--- + +### create_user + + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| instance | string | The ID of the instance where the user will be created. | Yes | | +| name | string | The name for the new user. 
Must be unique within the instance. | Yes | | +| password | string | A secure password for the new user. Not required for IAM users. | No | | +| iamUser | boolean | Set to true to create a Cloud IAM user. | Yes | | + + +--- + +### get_instance + + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| projectId | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| instanceId | string | The instance ID | Yes | | + + +--- + +### list_databases + +Lists all databases for a Cloud SQL instance. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| instance | string | The instance ID | Yes | | + + +--- + +### list_instances + +Lists all types of Cloud SQL instances for a project. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | + + +--- + +### wait_for_operation + + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. 
| No | | +| operation | string | The operation ID | Yes | | + + +--- + diff --git a/skills/cloud-sql-mysql-admin/scripts/create_database.js b/skills/cloud-sql-mysql-admin/scripts/create_database.js new file mode 100755 index 0000000..ca2ced2 --- /dev/null +++ b/skills/cloud-sql-mysql-admin/scripts/create_database.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "create_database"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + 
const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? '"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-admin/scripts/create_instance.js b/skills/cloud-sql-mysql-admin/scripts/create_instance.js new file mode 100755 index 0000000..3c9a178 --- /dev/null +++ b/skills/cloud-sql-mysql-admin/scripts/create_instance.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "create_instance"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); 
+ + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? '"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-admin/scripts/create_user.js b/skills/cloud-sql-mysql-admin/scripts/create_user.js new file mode 100755 index 0000000..5b36f99 --- /dev/null +++ b/skills/cloud-sql-mysql-admin/scripts/create_user.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "create_user"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-admin/scripts/get_instance.js b/skills/cloud-sql-mysql-admin/scripts/get_instance.js new file mode 100755 index 0000000..7b555e5 --- /dev/null +++ b/skills/cloud-sql-mysql-admin/scripts/get_instance.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "get_instance"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-admin/scripts/list_databases.js b/skills/cloud-sql-mysql-admin/scripts/list_databases.js new file mode 100755 index 0000000..a3ee6d8 --- /dev/null +++ b/skills/cloud-sql-mysql-admin/scripts/list_databases.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_databases"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-admin/scripts/list_instances.js b/skills/cloud-sql-mysql-admin/scripts/list_instances.js new file mode 100755 index 0000000..d0f39e6 --- /dev/null +++ b/skills/cloud-sql-mysql-admin/scripts/list_instances.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_instances"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-admin/scripts/wait_for_operation.js b/skills/cloud-sql-mysql-admin/scripts/wait_for_operation.js new file mode 100755 index 0000000..6135122 --- /dev/null +++ b/skills/cloud-sql-mysql-admin/scripts/wait_for_operation.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "wait_for_operation"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-data/SKILL.md b/skills/cloud-sql-mysql-data/SKILL.md new file mode 100644 index 0000000..1df8dc8 --- /dev/null +++ b/skills/cloud-sql-mysql-data/SKILL.md @@ -0,0 +1,75 @@ +--- +name: cloud-sql-mysql-data +description: Use these skills when you need to explore your database schema, execute SQL queries to interact with your data, and inspect how MySQL plans to execute your statements. +--- + +## Usage + +All scripts can be executed using Node.js. Replace `` and `` with actual values. + +**Bash:** +`node /scripts/.js '{"": ""}'` + +**PowerShell:** +`node /scripts/.js '{\"\": \"\"}'` + +Note: The scripts automatically load the environment variables from various .env files. Do not ask the user to set vars unless skill execution fails due to env var absence. + + +## Scripts + + +### execute_sql + +Use this skill to execute SQL. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| sql | string | The sql to execute. | Yes | | + + +--- + +### get_query_plan + +Provide information about how MySQL executes a SQL statement. Common use cases include: 1) analyze query plan to improve its performance, and 2) determine effectiveness of existing indexes and evaluate new ones. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| sql_statement | string | The sql statement to explain. 
| Yes | | + + +--- + +### list_active_queries + +Lists top N (default 10) ongoing queries from processlist and innodb_trx, ordered by execution time in descending order. Returns detailed information of those queries in json format, including process id, query, transaction duration, transaction wait duration, process time, transaction state, process state, username with host, transaction rows locked, transaction rows modified, and db schema. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| min_duration_secs | integer | Optional: Only show queries running for at least this long in seconds | No | `0` | +| limit | integer | Optional: The maximum number of rows to return. | No | `100` | + + +--- + +### list_tables + +Lists detailed schema information (object type, columns, constraints, indexes, triggers, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| table_names | string | Optional: A comma-separated list of table names. If empty, details for all tables will be listed. | No | `` | +| output_format | string | Optional: Use 'simple' for names only or 'detailed' for full info. | No | `detailed` | + + +--- + diff --git a/skills/cloud-sql-mysql-data/scripts/execute_sql.js b/skills/cloud-sql-mysql-data/scripts/execute_sql.js new file mode 100755 index 0000000..27e4127 --- /dev/null +++ b/skills/cloud-sql-mysql-data/scripts/execute_sql.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "execute_sql"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + 
OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? '"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-data/scripts/get_query_plan.js b/skills/cloud-sql-mysql-data/scripts/get_query_plan.js new file mode 100755 index 0000000..d49e104 --- /dev/null +++ b/skills/cloud-sql-mysql-data/scripts/get_query_plan.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "get_query_plan"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-data/scripts/list_active_queries.js b/skills/cloud-sql-mysql-data/scripts/list_active_queries.js new file mode 100755 index 0000000..ca9e382 --- /dev/null +++ b/skills/cloud-sql-mysql-data/scripts/list_active_queries.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_active_queries"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-data/scripts/list_tables.js b/skills/cloud-sql-mysql-data/scripts/list_tables.js new file mode 100755 index 0000000..b8f38a7 --- /dev/null +++ b/skills/cloud-sql-mysql-data/scripts/list_tables.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_tables"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-lifecycle/SKILL.md b/skills/cloud-sql-mysql-lifecycle/SKILL.md new file mode 100644 index 0000000..f2e6c86 --- /dev/null +++ b/skills/cloud-sql-mysql-lifecycle/SKILL.md @@ -0,0 +1,113 @@ +--- +name: cloud-sql-mysql-lifecycle +description: Use these skills when you need to manage the durability and safety of your data by creating backups, restoring from previous states, or cloning instances for recovery and testing. +--- + +## Usage + +All scripts can be executed using Node.js. Replace `` and `` with actual values. + +**Bash:** +`node /scripts/.js '{"": ""}'` + +**PowerShell:** +`node /scripts/.js '{\"\": \"\"}'` + +Note: The scripts automatically load the environment variables from various .env files. Do not ask the user to set vars unless skill execution fails due to env var absence. + + +## Scripts + + +### clone_instance + +Clone an existing Cloud SQL instance into a new instance. The clone can be a direct copy of the source instance, or a point-in-time-recovery (PITR) clone from a specific timestamp. The call returns a Cloud SQL Operation object. Call wait_for_operation skill after this, make sure to use multiplier as 4 to poll the operation status till it is marked DONE. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. 
| No | | +| sourceInstanceName | string | The name of the instance to be cloned. | Yes | | +| destinationInstanceName | string | The name of the new instance that will be created by cloning the source instance. | Yes | | +| pointInTime | string | The timestamp in RFC 3339 format to which the source instance should be cloned. | No | | +| preferredZone | string | The preferred zone for the new instance. | No | | +| preferredSecondaryZone | string | The preferred secondary zone for the new instance. | No | | + + +--- + +### create_backup + +Creates a backup on a Cloud SQL instance. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| instance | string | Cloud SQL instance ID. This does not include the project ID. | Yes | | +| location | string | Location of the backup run. | No | | +| backup_description | string | The description of this backup run. | No | | + + +--- + +### get_instance + + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| projectId | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| instanceId | string | The instance ID | Yes | | + + +--- + +### list_instances + +Lists all type of Cloud SQL instances for a project. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | + + +--- + +### restore_backup + +Restores a backup on a Cloud SQL instance. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| target_project | string | The GCP project ID. 
This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| target_instance | string | Cloud SQL instance ID of the target instance. This does not include the project ID. | Yes | | +| backup_id | string | Identifier of the backup being restored. Can be a BackupRun ID, backup name, or BackupDR backup name. Use the full backup ID as provided, do not try to parse it | Yes | | +| source_project | string | GCP project ID of the instance that the backup belongs to. Only required if the backup_id is a BackupRun ID. | No | | +| source_instance | string | Cloud SQL instance ID of the instance that the backup belongs to. Only required if the backup_id is a BackupRun ID. | No | | + + +--- + +### wait_for_operation + + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| project | string | The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one. | No | | +| operation | string | The operation ID | Yes | | + + +--- + diff --git a/skills/cloud-sql-mysql-lifecycle/scripts/clone_instance.js b/skills/cloud-sql-mysql-lifecycle/scripts/clone_instance.js new file mode 100755 index 0000000..323b55f --- /dev/null +++ b/skills/cloud-sql-mysql-lifecycle/scripts/clone_instance.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "clone_instance"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-lifecycle/scripts/create_backup.js b/skills/cloud-sql-mysql-lifecycle/scripts/create_backup.js new file mode 100755 index 0000000..5fc85c8 --- /dev/null +++ b/skills/cloud-sql-mysql-lifecycle/scripts/create_backup.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "create_backup"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-lifecycle/scripts/get_instance.js b/skills/cloud-sql-mysql-lifecycle/scripts/get_instance.js new file mode 100755 index 0000000..7b555e5 --- /dev/null +++ b/skills/cloud-sql-mysql-lifecycle/scripts/get_instance.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "get_instance"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-lifecycle/scripts/list_instances.js b/skills/cloud-sql-mysql-lifecycle/scripts/list_instances.js new file mode 100755 index 0000000..d0f39e6 --- /dev/null +++ b/skills/cloud-sql-mysql-lifecycle/scripts/list_instances.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_instances"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-lifecycle/scripts/restore_backup.js b/skills/cloud-sql-mysql-lifecycle/scripts/restore_backup.js new file mode 100755 index 0000000..92bff3f --- /dev/null +++ b/skills/cloud-sql-mysql-lifecycle/scripts/restore_backup.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "restore_backup"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-lifecycle/scripts/wait_for_operation.js b/skills/cloud-sql-mysql-lifecycle/scripts/wait_for_operation.js new file mode 100755 index 0000000..6135122 --- /dev/null +++ b/skills/cloud-sql-mysql-lifecycle/scripts/wait_for_operation.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "wait_for_operation"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-monitor/SKILL.md b/skills/cloud-sql-mysql-monitor/SKILL.md new file mode 100644 index 0000000..862736b --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/SKILL.md @@ -0,0 +1,203 @@ +--- +name: cloud-sql-mysql-monitor +description: Use these skills when you need to troubleshoot slow queries, analyze system-level PromQL metrics, and identify structural performance issues like table fragmentation or missing unique indexes. +--- + +## Usage + +All scripts can be executed using Node.js. Replace `` and `` with actual values. + +**Bash:** +`node /scripts/.js '{"": ""}'` + +**PowerShell:** +`node /scripts/.js '{\"\": \"\"}'` + +Note: The scripts automatically load the environment variables from various .env files. Do not ask the user to set vars unless skill execution fails due to env var absence. + + +## Scripts + + +### get_query_metrics + +Fetches query level cloudmonitoring data (timeseries metrics) for queries running in Mysql instance using a PromQL query. Take projectID and instanceID from the user for which the metrics timeseries data needs to be fetched. +To use this skill, you must provide the Google Cloud `projectId` and a PromQL `query`. + +Generate PromQL `query` for Mysql query metrics. Use the provided metrics and rules to construct queries, Get the labels like `instance_id`, `query_hash` from user intent. If query_hash is provided then use the per_query metrics. Query hash and query id are same. + +Defaults: +1.
Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user. + +PromQL Query Examples: +1. Basic Time Series: `avg_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m])` +2. Top K: `topk(30, avg_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m]))` +3. Mean: `avg(avg_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m]))` +4. Minimum: `min(min_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m]))` +5. Maximum: `max(max_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m]))` +6. Sum: `sum(avg_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m]))` +7. Count streams: `count(avg_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m]))` +8. 
Percentile with groupby on resource_id, database: `quantile by ("resource_id","database")(0.99,avg_over_time({"__name__"="dbinsights.googleapis.com/aggregate/execution_time","monitored_resource"="cloudsql_instance_database","project_id"="my-projectId","resource_id"="my-projectId:my-instanceId"}[5m]))` + +Available Metrics List: metricname. description. monitored resource. labels. resource_id label format is `project_id:instance_id` which is actually instance id only. aggregate is the aggregated values for all query stats, Use aggregate metrics if query id is not provided. For perquery metrics do not fetch querystring unless specified by user specifically. Have the aggregation on query hash to avoid fetching the querystring. Do not use latency metrics for anything. +1. `dbinsights.googleapis.com/aggregate/latencies`: Cumulative query latency distribution per user and database. `cloudsql_instance_database`. `user`, `client_addr`, `database`, `project_id`, `resource_id`. +2. `dbinsights.googleapis.com/aggregate/execution_time`: Cumulative query execution time per user and database. `cloudsql_instance_database`. `user`, `client_addr`, `database`, `project_id`, `resource_id`. +3. `dbinsights.googleapis.com/aggregate/execution_count`: Total number of query executions per user and database. `cloudsql_instance_database`. `user`, `client_addr`, `database`, `project_id`, `resource_id`. +4. `dbinsights.googleapis.com/aggregate/lock_time`: Cumulative lock wait time per user and database. `cloudsql_instance_database`. `user`, `client_addr`, `lock_type`, `database`, `project_id`, `resource_id`. +5. `dbinsights.googleapis.com/aggregate/io_time`: Cumulative IO wait time per user and database. `cloudsql_instance_database`. `user`, `client_addr`, `database`, `project_id`, `resource_id`. +6. `dbinsights.googleapis.com/aggregate/row_count`: Total number of rows affected during query execution. `cloudsql_instance_database`. 
`user`, `client_addr`, `row_status`, `database`, `project_id`, `resource_id`. +7. `dbinsights.googleapis.com/perquery/latencies`: Cumulative query latency distribution per user, database, and query. `cloudsql_instance_database`. `querystring`, `user`, `client_addr`, `query_hash`, `database`, `project_id`, `resource_id`. +8. `dbinsights.googleapis.com/perquery/execution_time`: Cumulative query execution time per user, database, and query. `cloudsql_instance_database`. `querystring`, `user`, `client_addr`, `query_hash`, `database`, `project_id`, `resource_id`. +9. `dbinsights.googleapis.com/perquery/execution_count`: Total number of query executions per user, database, and query. `cloudsql_instance_database`. `querystring`, `user`, `client_addr`, `query_hash`, `database`, `project_id`, `resource_id`. +10. `dbinsights.googleapis.com/perquery/lock_time`: Cumulative lock wait time per user, database, and query. `cloudsql_instance_database`. `querystring`, `user`, `client_addr`, `lock_type`, `query_hash`, `database`, `project_id`, `resource_id`. +11. `dbinsights.googleapis.com/perquery/io_time`: Cumulative io wait time per user, database, and query. `cloudsql_instance_database`. `querystring`, `user`, `client_addr`, `query_hash`, `database`, `project_id`, `resource_id`. +12. `dbinsights.googleapis.com/perquery/row_count`: Total number of rows affected during query execution. `cloudsql_instance_database`. `querystring`, `user`, `client_addr`, `query_hash`, `row_status`, `database`, `project_id`, `resource_id`. +13. `dbinsights.googleapis.com/pertag/latencies`: Cumulative query latency distribution per user, database, and tag. `cloudsql_instance_database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`, `database`, `project_id`, `resource_id`. +14. `dbinsights.googleapis.com/pertag/execution_time`: Cumulative query execution time per user, database, and tag. `cloudsql_instance_database`. 
`user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`, `database`, `project_id`, `resource_id`. +15. `dbinsights.googleapis.com/pertag/execution_count`: Total number of query executions per user, database, and tag. `cloudsql_instance_database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`, `database`, `project_id`, `resource_id`. +16. `dbinsights.googleapis.com/pertag/lock_time`: Cumulative lock wait time per user, database and tag. `cloudsql_instance_database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `lock_type`, `tag_hash`, `database`, `project_id`, `resource_id`. +17. `dbinsights.googleapis.com/pertag/io_time`: Cumulative IO wait time per user, database and tag. `cloudsql_instance_database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`, `database`, `project_id`, `resource_id`. +18. `dbinsights.googleapis.com/pertag/row_count`: Total number of rows affected during query execution. `cloudsql_instance_database`. `user`, `client_addr`, `action`, `application`, `controller`, `db_driver`, `framework`, `route`, `tag_hash`, `row_status`, `database`, `project_id`, `resource_id`. + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| projectId | string | The Id of the Google Cloud project. | Yes | | +| query | string | The promql query to execute. | Yes | | + + +--- + +### get_query_plan + +Provide information about how MySQL executes a SQL statement. Common use cases include: 1) analyze query plan to improve its performance, and 2) determine effectiveness of existing indexes and evaluate new ones. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| sql_statement | string | The sql statement to explain.
| Yes | | + + +--- + +### get_system_metrics + +Fetches system level cloudmonitoring data (timeseries metrics) for a MySQL instance using a PromQL query. Take projectId and instanceId from the user for which the metrics timeseries data needs to be fetched. +To use this skill, you must provide the Google Cloud `projectId` and a PromQL `query`. + +Generate PromQL `query` for MySQL system metrics. Use the provided metrics and rules to construct queries, Get the labels like `instance_id` from user intent. + +Defaults: +1. Interval: Use a default interval of `5m` for `_over_time` aggregation functions unless a different window is specified by the user. + +PromQL Query Examples: +1. Basic Time Series: `avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m])` +2. Top K: `topk(30, avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))` +3. Mean: `avg(avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))` +4. Minimum: `min(min_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))` +5. Maximum: `max(max_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))` +6. Sum: `sum(avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))` +7. 
Count streams: `count(avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))` +8. Percentile with groupby on database_id: `quantile by ("database_id")(0.99,avg_over_time({"__name__"="cloudsql.googleapis.com/database/cpu/utilization","monitored_resource"="cloudsql_database","project_id"="my-projectId","database_id"="my-projectId:my-instanceId"}[5m]))` + +Available Metrics List: metricname. description. monitored resource. labels. database_id is actually the instance id and the format is `project_id:instance_id`. +1. `cloudsql.googleapis.com/database/cpu/utilization`: Current CPU utilization as a percentage of reserved CPU. `cloudsql_database`. `database`, `project_id`, `database_id`. +2. `cloudsql.googleapis.com/database/network/connections`: Number of connections to the database instance. `cloudsql_database`. `database`, `project_id`, `database_id`. +3. `cloudsql.googleapis.com/database/network/received_bytes_count`: Delta count of bytes received through the network. `cloudsql_database`. `database`, `project_id`, `database_id`. +4. `cloudsql.googleapis.com/database/network/sent_bytes_count`: Delta count of bytes sent through the network. `cloudsql_database`. `destination`, `database`, `project_id`, `database_id`. +5. `cloudsql.googleapis.com/database/memory/components`: Memory usage for components like usage, cache, and free memory. `cloudsql_database`. `component`, `database`, `project_id`, `database_id`. +6. `cloudsql.googleapis.com/database/disk/bytes_used_by_data_type`: Data utilization in bytes. `cloudsql_database`. `data_type`, `database`, `project_id`, `database_id`. +7. `cloudsql.googleapis.com/database/disk/read_ops_count`: Delta count of data disk read IO operations. `cloudsql_database`. `database`, `project_id`, `database_id`. +8. 
`cloudsql.googleapis.com/database/disk/write_ops_count`: Delta count of data disk write IO operations. `cloudsql_database`. `database`, `project_id`, `database_id`. +9. `cloudsql.googleapis.com/database/mysql/queries`: Delta count of statements executed by the server. `cloudsql_database`. `database`, `project_id`, `database_id`. +10. `cloudsql.googleapis.com/database/mysql/questions`: Delta count of statements sent by the client. `cloudsql_database`. `database`, `project_id`, `database_id`. +11. `cloudsql.googleapis.com/database/mysql/received_bytes_count`: Delta count of bytes received by MySQL process. `cloudsql_database`. `database`, `project_id`, `database_id`. +12. `cloudsql.googleapis.com/database/mysql/sent_bytes_count`: Delta count of bytes sent by MySQL process. `cloudsql_database`. `database`, `project_id`, `database_id`. +13. `cloudsql.googleapis.com/database/mysql/innodb_buffer_pool_pages_dirty`: Number of unflushed pages in the InnoDB buffer pool. `cloudsql_database`. `database`, `project_id`, `database_id`. +14. `cloudsql.googleapis.com/database/mysql/innodb_buffer_pool_pages_free`: Number of unused pages in the InnoDB buffer pool. `cloudsql_database`. `database`, `project_id`, `database_id`. +15. `cloudsql.googleapis.com/database/mysql/innodb_buffer_pool_pages_total`: Total number of pages in the InnoDB buffer pool. `cloudsql_database`. `database`, `project_id`, `database_id`. +16. `cloudsql.googleapis.com/database/mysql/innodb_data_fsyncs`: Delta count of InnoDB fsync() calls. `cloudsql_database`. `database`, `project_id`, `database_id`. +17. `cloudsql.googleapis.com/database/mysql/innodb_os_log_fsyncs`: Delta count of InnoDB fsync() calls to the log file. `cloudsql_database`. `database`, `project_id`, `database_id`. +18. `cloudsql.googleapis.com/database/mysql/innodb_pages_read`: Delta count of InnoDB pages read. `cloudsql_database`. `database`, `project_id`, `database_id`. +19. 
`cloudsql.googleapis.com/database/mysql/innodb_pages_written`: Delta count of InnoDB pages written. `cloudsql_database`. `database`, `project_id`, `database_id`. +20. `cloudsql.googleapis.com/database/mysql/open_tables`: The number of tables that are currently open. `cloudsql_database`. `database`, `project_id`, `database_id`. +21. `cloudsql.googleapis.com/database/mysql/opened_table_count`: The number of tables opened since the last sample. `cloudsql_database`. `database`, `project_id`, `database_id`. +22. `cloudsql.googleapis.com/database/mysql/open_table_definitions`: The number of table definitions currently cached. `cloudsql_database`. `database`, `project_id`, `database_id`. +23. `cloudsql.googleapis.com/database/mysql/opened_table_definitions_count`: The number of table definitions cached since the last sample. `cloudsql_database`. `database`, `project_id`, `database_id`. +24. `cloudsql.googleapis.com/database/mysql/innodb/dictionary_memory`: Memory allocated for the InnoDB dictionary cache. `cloudsql_database`. `database`, `project_id`, `database_id`. + + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| projectId | string | The Id of the Google Cloud project. | Yes | | +| query | string | The promql query to execute. | Yes | | + + +--- + +### list_active_queries + +Lists top N (default 10) ongoing queries from processlist and innodb_trx, ordered by execution time in descending order. Returns detailed information of those queries in json format, including process id, query, transaction duration, transaction wait duration, process time, transaction state, process state, username with host, transaction rows locked, transaction rows modified, and db schema. 
+ +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| min_duration_secs | integer | Optional: Only show queries running for at least this long in seconds | No | `0` | +| limit | integer | Optional: The maximum number of rows to return. | No | `100` | + + +--- + +### list_table_fragmentation + +List table fragmentation in MySQL, by calculating the size of the data and index files and free space allocated to each table. The query calculates fragmentation percentage which represents the proportion of free space relative to the total data and index size. Storage can be reclaimed for tables with high fragmentation using OPTIMIZE TABLE. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| table_schema | string | (Optional) The database where fragmentation check is to be executed. Check all tables visible to the current user if not specified | No | `` | +| table_name | string | (Optional) Name of the table to be checked. Check all tables visible to the current user if not specified. | No | `` | +| data_free_threshold_bytes | integer | (Optional) Only show tables with at least this much free space in bytes. Default is 1 | No | `1` | +| limit | integer | (Optional) Max rows to return, default is 10 | No | `10` | + + +--- + +### list_table_stats + +Display table statistics including table size, total latency, rows read, rows written, read and write latency for entire instance, a specified database, or a specified table. Specifying a database name or table name filters the output to that specific db or table. Results are limited to 10 by default. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| table_schema | string | (Optional) The database where statistics is to be executed. 
Check all tables visible to the current user if not specified | No | `` | +| table_name | string | (Optional) Name of the table to be checked. Check all tables visible to the current user if not specified. | No | `` | +| sort_by | string | (Optional) The column to sort by | No | `` | +| limit | integer | (Optional) Max rows to return, default is 10 | No | `10` | +| connected_schema | string | (Optional) The connected db | No | | + + +--- + +### list_tables_missing_unique_indexes + +Find tables that do not have primary or unique key constraint. A primary key or unique key is the only mechanism that guarantees a row is unique. Without them, the database-level protection against data integrity issues will be missing. + +#### Parameters + +| Name | Type | Description | Required | Default | +| :--- | :--- | :--- | :--- | :--- | +| table_schema | string | (Optional) The database where the check is to be performed. Check all tables visible to the current user if not specified | No | `` | +| limit | integer | (Optional) Max rows to return, default is 50 | No | `50` | + + +--- + diff --git a/skills/cloud-sql-mysql-monitor/scripts/get_query_metrics.js b/skills/cloud-sql-mysql-monitor/scripts/get_query_metrics.js new file mode 100755 index 0000000..016a423 --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/scripts/get_query_metrics.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "get_query_metrics"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-monitor/scripts/get_query_plan.js b/skills/cloud-sql-mysql-monitor/scripts/get_query_plan.js new file mode 100755 index 0000000..d49e104 --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/scripts/get_query_plan.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "get_query_plan"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-monitor/scripts/get_system_metrics.js b/skills/cloud-sql-mysql-monitor/scripts/get_system_metrics.js new file mode 100755 index 0000000..17adcb9 --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/scripts/get_system_metrics.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "get_system_metrics"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-monitor/scripts/list_active_queries.js b/skills/cloud-sql-mysql-monitor/scripts/list_active_queries.js new file mode 100755 index 0000000..ca9e382 --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/scripts/list_active_queries.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_active_queries"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-monitor/scripts/list_table_fragmentation.js b/skills/cloud-sql-mysql-monitor/scripts/list_table_fragmentation.js new file mode 100755 index 0000000..c055097 --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/scripts/list_table_fragmentation.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_table_fragmentation"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? 
args.map(arg => arg.includes('"') ? '"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-monitor/scripts/list_table_stats.js b/skills/cloud-sql-mysql-monitor/scripts/list_table_stats.js new file mode 100755 index 0000000..d22d90f --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/scripts/list_table_stats.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_table_stats"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? args.map(arg => arg.includes('"') ? 
'"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); diff --git a/skills/cloud-sql-mysql-monitor/scripts/list_tables_missing_unique_indexes.js b/skills/cloud-sql-mysql-monitor/scripts/list_tables_missing_unique_indexes.js new file mode 100755 index 0000000..3e3cb68 --- /dev/null +++ b/skills/cloud-sql-mysql-monitor/scripts/list_tables_missing_unique_indexes.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const { spawn, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const os = require('os'); + +const toolName = "list_tables_missing_unique_indexes"; +const configArgs = ["--prebuilt", "cloud-sql-mysql"]; + +const OPTIONAL_VARS_TO_OMIT_IF_EMPTY = [ + 'CLOUD_SQL_MYSQL_USER', + 'CLOUD_SQL_MYSQL_PASSWORD', + 'CLOUD_SQL_MYSQL_IP_TYPE', +]; + + +function mergeEnvVars(env) { + if (process.env.GEMINI_CLI === '1') { + const envPath = path.resolve(__dirname, '../../../.env'); + if (fs.existsSync(envPath)) { + const envContent = fs.readFileSync(envPath, 'utf-8'); + envContent.split('\n').forEach(line => { + const trimmed = line.trim(); + if (trimmed && !trimmed.startsWith('#')) { + const splitIdx = trimmed.indexOf('='); + if (splitIdx !== -1) { + const key = trimmed.slice(0, splitIdx).trim(); + let value = trimmed.slice(splitIdx + 1).trim(); + value = value.replace(/(^['"]|['"]$)/g, ''); + if (env[key] === undefined) { + env[key] = value; + } + } + } + }); + } + } else if (process.env.CLAUDECODE === '1') { + const prefix = 'CLAUDE_PLUGIN_OPTION_'; + for (const key in process.env) { + if (key.startsWith(prefix)) { + env[key.substring(prefix.length)] = process.env[key]; + } + } + } +} + +function prepareEnvironment() { + let env = { ...process.env }; + let userAgent = "skills"; + if (process.env.GEMINI_CLI === '1') { + userAgent = "skills-geminicli"; + } else if (process.env.CLAUDECODE === '1') { + userAgent = "skills-claudecode"; + } else if (process.env.CODEX_CI === '1') { + userAgent = "skills-codex"; + } + mergeEnvVars(env); + + OPTIONAL_VARS_TO_OMIT_IF_EMPTY.forEach(varName => { + if (env[varName] === '') { + delete env[varName]; + } + }); + + + return { env, userAgent }; +} + +function main() { + const { env, userAgent } = prepareEnvironment(); + const args = process.argv.slice(2); + + const command = os.platform() === 'win32' ? 'npx.cmd' : 'npx'; + const processedArgs = os.platform() === 'win32' ? 
args.map(arg => arg.includes('"') ? '"' + arg.replace(/"/g, '""') + '"' : arg) : args; + const npxArgs = ["--yes", "@toolbox-sdk/server@1.1.0", "--log-level", "error", ...configArgs, "invoke", toolName, "--user-agent-metadata", userAgent, ...processedArgs]; + + const child = spawn(command, npxArgs, { shell: os.platform() === 'win32', stdio: 'inherit', env }); + + + child.on('close', (code) => { + process.exit(code); + }); + + child.on('error', (err) => { + console.error("Error executing toolbox:", err); + process.exit(1); + }); +} + +main(); From fffe1277ab30644bb865bb31b1aef78181caca90 Mon Sep 17 00:00:00 2001 From: Twisha Bansal <58483338+twishabansal@users.noreply.github.com> Date: Wed, 15 Apr 2026 11:16:34 +0530 Subject: [PATCH 2/4] Delete skills-gen/db_groups.md --- skills-gen/db_groups.md | 253 ---------------------------------------- 1 file changed, 253 deletions(-) delete mode 100644 skills-gen/db_groups.md diff --git a/skills-gen/db_groups.md b/skills-gen/db_groups.md deleted file mode 100644 index 379b677..0000000 --- a/skills-gen/db_groups.md +++ /dev/null @@ -1,253 +0,0 @@ -# Refactoring Database Toolsets in MCP Toolbox - -## **Overview** - -Current telemetry indicates that exposing agents to more than [\~20 tools leads to a collapse in reasoning accuracy (\<40%)](https://docs.google.com/document/d/1gg47e4qcXJlZ2Zd1uOl5LPtrevGP7p1RTealXk288k0/edit?tab=t.0). To align with the [**MCP Toolbox Style Guide**](https://docs.google.com/document/d/1M_W98KfCt_mfM0vJjCkd1wU9fn6EcluWhtY0qKOAHxQ/edit?resourcekey=0-21OnBgfslm3l4NJfagLveA&tab=t.0), we must refactor our "monolithic" prebuilt values into sets of **5–8 tools** organized by **Critical User Journey**. 
- -### **Current Toolset Size Analysis** - -| Database Source | Current Tool Count | Status | Primary Reason for Bloat | -| :-------------------------------------- | :----------------- | :------------- | :---------------------------------------------------------------------------------------------------------------------------------------- | -| **Looker** | 33 | 🔴 Critical | Includes full SDK (Dev, Admin, and Query). | -| **AlloyDB Omni** | 31 | 🔴 Critical | High overlap with Postgres \+ Columnar specific tools. | -| **AlloyDB Postgres** | 29 | 🔴 Critical | Mixed Data, Maintenance, and Admin tools. | -| **AlloyDB Admin** | 10 | 🟡 High | Combined Cluster, Instance, and User management. | -| **Cloud SQL for PostgreSQL (Postgres)** | 29 | 🔴 Critical | Monolithic engine for data and maintenance. | -| **Cloud SQL Postgres Admin** | 11 | 🟡 High | Combined Instance and User management. | -| **BigQuery** | 10 | 🟡 High | Mixes Metadata discovery with ML Analytics. | -| **Cloud SQL MySQL Admin** | 10 | 🟡 High | Combined Instance and User management. | -| **Cloud SQL for MySQL** | 6 | ✅ Optimal | Mixes Admin lifecycle with Data exploration. | -| **Cloud SQL SQL Server Admin** | 10 | 🟡 High | Combined Instance and User management. | -| **Cloud SQL SQL Server** | 2 | ✅ Optimal | | -| **Observability (AlloyDB, Cloud SQL)** | 2 | ✅ Optimal | | -| **Healthcare API** | 15 | 🟡 High→ 👌 OK | Combined FHIR and DICOM protocols. Uses toolsets: cloud_healthcare_dataset_tools cloud_healthcare_fhir_tools cloud_healthcare_dicom_tools | -| **Firestore** | 9 | 🟡 High | Mixes Data operations with Rules management. | -| **Spanner** | 4 | ✅ Optimal | | -| **Dataplex** | 3 | ✅ Optimal | | - -### **Context** - -This doc will propose organizing prebuilt tools into discrete tool sets optimized for use cases. This can be exposed in Toolbox in a couple ways 1\) a new prebuilt toolset i.e. ./toolbox \--prebuilt alloydb-postgres-new-name or 2\) a named toolset within a prebuilt toolset i.e. 
./toolbox \--prebuilt alloydb-postgres and use MCP server endpoint /mcp/{toolset_name}. Tools can be in multiple tool sets in order to have coverage of use case tasks. - -Note: please see the [Dictionary](?tab=t.dxjmda6q2112) and [Additional Context](?tab=t.dxjmda6q2112) for more background information. - -Related work includes the transition from [**Toolsets** to **Groups**](https://docs.google.com/document/d/1KUw2F1_kuHffsB2RGau6Ol0puKnMh_GMF3uO8UUKwh0/edit?resourcekey=0-ixAkamQ_UUvLPg1yzcyZ7w&tab=t.0) is a strategic architectural shift designed to create a unified collection for all MCP primitives—specifically integrating both **tools, resources, and prompts** into a single logical entity. Groupings allow for logical organization of functionality (e.g. email, calendar, etc). They also enable client-side filtering \- allowing the client or user to select only the relevant functionality for a specific task. This reduces context overload and minimizes the number of tokens sent to the LLM. -This evolution directly supports the generation of **Agent Skills**, as skills are currently produced on a per-toolset (now per-group) basis. By using the new description field in a Group definition, Toolbox can automatically populate the server instructions for the MCP server and the \--description flag required to define a skill's purpose and strategy in its SKILL.md file. - -I recommend approach 2\. Users can still get all tools in the default toolset but can limit by toolset for better performance. 
- -| Topic / Concept | \#1 New Prebuilt Flags (--prebuilt alloydb-dbadmin) | \#2 Named Toolsets within Prebuilt (/mcp/{toolset_name}) | -| :------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Discoverability** | Visibility in documentation and \--help menus. | Visible in logs and UI. Requires users to know the specific MCP endpoint path or consult external API docs. Currently no mechanism to list available toolsets via the MCP endpoint. | -| **Granularity/Complexity** | Each flag represents a "bundled" identity. Can mix-and-match flags. **Flat:** Avoids hierarchical confusion but leads to "flag explosion" as more specialized use cases are added. | Allows a single server to host multiple logical groupings (e.g., read-only, schema-mgmt, data-ops). **Hierarchical:** Supports the MCP "Primitive Grouping" standard for organized, named collections within a single server. | -| **User Experience (UX)** | **Simple & Explicit:** One command yields one specific set of tools. Very "Unix-style" and predictable. | **Flexible but Complex:** The default endpoint might provide "everything," while named endpoints provide subsets. Could be confusing to debug. | -| **System Overhead** | **Higher:** Requires separate process instances or flag-parsing logic for every "new" prebuilt name added. | **Lower:** A single running MCP server can multiplex multiple toolsets via routing, saving resources. | -| **Tool Overlap** | Can lead to code duplication or complex symlinking in the backend to ensure a tool exists in two "flags." 
| Naturally supports overlap; the server logic simply maps the same function to multiple endpoint aliases. | -| **Skill Generation** | **Manual-ish:** Requires a specific flag per persona; easier to map one "Flag" to one "Skill Folder". | **Automated:** One prebuilt server can dynamically export multiple Groups as different Skills via its internal registry. | -| **Backward Compatibility** | **Low:** Requires breaking changes of toolsets | **Strong:** Does not impact existing MCP client-server URL structures. | - -This doc will also provide a comparison to the current OneMCP tool sets in: [Data Cloud OneMCP: Tools and Commitment Dashboard](https://docs.google.com/spreadsheets/d/1rXWhXONd5xqCpU-oJJ8eTSWtL4QVm9yuavjur-p_NXY/edit?gid=1286305679#gid=1286305679) - -### **Supporting Toolsets for STDIO** - -We will need a flag like `./toolbox --prebuilt alloydb-postgres/ops` where alloydb-postgres is the tool-source name and ops is the toolset name. - -## **Recommendation for Toolsets and Tool Names by Source** - -### **AlloyDB for PostgreSQL** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :---------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **alloydb-postgres-admin** | create_cluster get_cluster list_clusters create_instance get_instance list_instances database_overview wait_for_operation | 8 | Use these tools when you need to provision new AlloyDB clusters and instances, monitor their creation status, and retrieve high-level configuration or health data for the environment. 
| -| **access-management** | create_user, list_users, get_user, list_roles, list_pg_settings, database_overview | 6 | Use these tools when you need to manage database users, inspect permissions and roles, and verify global configuration parameters related to security and access control. | -| **alloydb-postgres-data** | execute_sql, list_tables, list_views, list_schemas, list_triggers, list_indexes, list_sequences, list_stored_procedure | 8 | Use these tools when you need to explore the database schema, identify objects like views and triggers, and execute custom SQL queries to interact with your data. | -| **alloydb-postgres-monitor** | list_active_queries, list_query_stats, get_query_plan, get_query_metrics, get_system_metrics, long_running_transactions, list_locks, list_database_stats | 8 | Use these tools when you need to troubleshoot slow performance, analyze query execution plans, identify resource-heavy processes, and monitor system-level PromQL metrics. | -| **alloydb-postgres-health** | list_top_bloated_tables, list_invalid_indexes, list_table_stats, get_column_cardinality, list_autovacuum_configurations, list_tablespaces, database_overview, get_instance | 8 | Use these tools when you need to optimize storage, identify index issues, analyze table statistics, or manage autovacuum and tablespace configurations to maintain peak database health. | -| **alloydb-postgres-optimize** | list_available_extensions, list_installed_extensions, list_memory_configurations, list_pg_settings, database_overview, get_cluster | 6 | Use these tools when you need to discover and manage PostgreSQL extensions or fine-tune engine-level settings such as memory allocation and server configuration parameters. 
| -| **replication** | replication_stats, list_replication_slots, list_publication_tables, list_instances, get_instance, database_overview | 6 | Use these tools when you need to monitor replication health, manage sync states between nodes, and ensure the high availability and data distribution of your AlloyDB cluster. | - -**OneMCP Comparison:** - -- **Unique to MCP Toolbox:** Deep columnar recommendations (list_columnar_recommended_columns) and maintenance insights. -- **Missing in Toolbox:** delete_instance, update_instance, clone_cluster, export_data, import_data. -- **Alignment:** Both support core cluster and instance CRUD. - -### **AlloyDB Omni** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :-------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **data** | execute_sql, list_tables, list_views, list_schemas, list_triggers, list_indexes, list_sequences, list_stored_procedure | 8 | Use these tools when you need to explore the database structure, identify schema objects like views and triggers, and execute SQL queries to interact with your data. | -| **performance** | execute_sql, get_query_plan, list_query_stats, get_column_cardinality, list_table_stats, list_database_stats, list_active_queries | 7 | Use these tools when you need to analyze query performance, generate execution plans, check table/column statistics, and monitor overall database activity. 
| -| **monitor** | database_overview, list_active_queries, long_running_transactions, list_locks, list_database_stats, list_pg_settings | 7 | Use these tools when you need to troubleshoot production issues by identifying locks, tracking long-running transactions, and getting a high-level view of server state. | -| **optimize** | list_pg_settings, list_memory_configurations, list_available_extensions, list_installed_extensions, list_autovacuum_configurations, list_columnar_configurations, list_columnar_recommended_columns | 7 | Use these tools when you need to fine-tune the database engine settings, manage extensions, or optimize the columnar engine for better analytical performance. | -| **health** | list_top_bloated_tables, list_invalid_indexes, list_table_stats, list_tablespaces, database_overview, list_autovacuum_configurations | 6 | Use these tools when you need to audit database health, identify storage bloat, find broken indexes, and verify tablespace or maintenance configurations. | -| **replication** | replication_stats, list_replication_slots, list_publication_tables, database_overview | 4 | Use these tools when you need to monitor the health of database replication, manage sync states between nodes, and audit publication tables for distributed setups. | -| **access-control** | list_roles, list_pg_settings, database_overview | 3 | Use these tools when you need to manage user roles, inspect permissions, and verify security-related configuration parameters. 
| - -**OneMCP Comparison:** OneMCP cannot support Omni - -### **BigQuery** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :--------------------- | :----------------------------------------------------------------------------------------- | :---- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **bigquery-data** | execute_sql list_dataset_ids list_table_ids get_dataset_info get_table_info search_catalog | 6 | Use these tools when you need to handle large-scale data exploration and dataset management. Use when users need to find data assets or run SQL at scale. Provides metadata discovery and query execution across the data warehouse. | -| **bigquery-analytics** | analyze_contribution ask_data_insights forecast search_catalog | 4 | Use these tools when you need to handle advanced data intelligence and predictive tasks. Use when a user asks "why" data changed or needs future projections. Provides automated insight generation and time-series forecasting. | - -**OneMCP Comparison:** - -- **Unique to MCP Toolbox:** Advanced analysis tools (analyze_contribution, forecast, search_catalog). -- **Parity:** High overlap on core metadata (list_dataset_ids, list_table_ids, get_table_info). 
- -### **Cloud SQL PostgreSQL & Standalone** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :-------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **admin** | create_instance, get_instance, list_instances, create_database, list_databases, create_user, wait_for_operation, clone_instance | 8 | Use these tools when you need to provision new Cloud SQL instances, create databases and users, clone existing environments, and monitor the progress of long-running operations. | -| **lifecycle** | create_backup, restore_backup, postgres_upgrade_precheck, wait_for_operation, database_overview, get_instance, list_instances | 7 | Use these tools when you need to manage the lifecycle of your instances, including performing backups and restores, checking major version upgrade compatibility, and monitoring overall instance status. | -| **data** | execute_sql, list_tables, list_views, list_schemas, list_triggers, list_indexes, list_sequences, list_stored_procedure | 8 | Use these tools when you need to explore the database structure, discover schema objects like views or stored procedures, and execute custom SQL queries to interact with your data. | -| **monitor** | get_system_metrics, get_query_metrics, list_query_stats, get_query_plan, list_database_stats, list_active_queries, long_running_transactions, list_locks | 8 | Use these tools when you need to troubleshoot performance bottlenecks, analyze query execution plans, identify resource-heavy processes, and monitor system-level PromQL metrics. 
| -| **health** | list_top_bloated_tables, list_invalid_indexes, list_table_stats, get_column_cardinality, list_autovacuum_configurations, list_tablespaces, database_overview, list_pg_settings | | Use these tools when you need to audit database health, identify storage bloat, find invalid indexes, analyze table statistics, and manage maintenance configurations like autovacuum. | -| **view-config** | list_available_extensions, list_installed_extensions, list_memory_configurations, list_pg_settings, database_overview, get_instance | | Use these tools when you need to discover and manage PostgreSQL extensions or fine-tune engine-level settings such as memory allocation and server configuration parameters. | -| **replication** | replication_stats, list_replication_slots, list_publication_tables, list_roles, list_pg_settings, database_overview | | Use these tools when you need to monitor replication health, manage sync states between nodes, and audit database roles and security settings to ensure environment integrity. | - -**OneMCP Comparison:** - -- **Unique to MCP Toolbox:** Extension management (list_available_extensions) and query plan analysis. -- **Missing in Toolbox:** PostgreSQL lifecycle management (delete_instance, update_instance). 
- -### **Cloud SQL MySQL & Standalone** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :-------------------- | :--------------------------------------------------------------------------------------------------------------------------------------- | :---- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **admin** | create_instance get_instance list_instances create_database list_databases create_user wait_for_operation | 7 | Use these tools when you need to provision new Cloud SQL for MySQL instances, create databases and users, clone existing environments, and monitor the progress of infrastructure operations. | -| **data** | execute_sql, list_tables, get_query_plan, list_active_queries | 4 | Use these tools when you need to explore your database schema, execute SQL queries to interact with your data, and inspect how MySQL plans to execute your statements. | -| **monitor** | get_query_plan, list_active_queries, get_query_metrics, get_system_metrics, list_table_fragmentation, list_tables_missing_unique_indexes | 6 | Use these tools when you need to troubleshoot slow queries, analyze system-level PromQL metrics, and identify structural performance issues like table fragmentation or missing unique indexes. | -| **lifecycle** | create_backup restore_backup clone_instance list_instances wait_for_operation | 5 | Use these tools when you need to manage the durability and safety of your data by creating backups, restoring from previous states, or cloning instances for recovery and testing. | - -**OneMCP Comparison:** - -- **Unique to MCP Toolbox:** Maintenance and observability insights (list_table_fragmentation, get_query_plan). -- **Unique to OneMCP:** Instance lifecycle (delete_instance, update_instance), import_data, export_data, and user management (delete_user). 
- -### **Cloud SQL SQL Server & Standalone** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :-------------------- | :-------------------------------------------------------------------------------------------------------- | :---- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **admin** | create_instance get_instance list_instances create_database list_databases create_user wait_for_operation | 7 | Use these tools when you need to provision new Cloud SQL for SQL Server instances, create databases and users, clone existing environments, and monitor the progress of long-running operations. | -| **data** | execute_sql list_tables | 2 | Use these tools when you need to explore the database schema, execute SQL queries to interact with your data, and monitor system-level performance metrics using PromQL queries. | -| **monitor** | get_system_metrics | 1 | Use these tools when you need to troubleshoot slow queries and analyze system-level PromQL metrics. | -| **lifecycle** | create_backup restore_backup clone_instance list_instances wait_for_operation | 5 | Use these tools when you need to manage the lifecycle and durability of your data, including creating backups, restoring from existing backups, and cloning instances for testing or migration. | - -**OneMCP Comparison:** - -- **Parity:** Core execution and administrative tools are aligned. 
- -### **Looker & Conversational Analytics** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :-------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------- | :---- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **looker-modeling** | get_models get_explores get_dimensions get_measures get_filters get_parameters | 6 | Handles LookML semantic layer discovery. Use when the user needs to understand what data fields are available for analysis. Provides detailed exploration of dimensions, measures, and model structures. | -| **looker-content** | get_looks run_look make_look get_dashboards run_dashboard make_dashboard add_dashboard_element add_dashboard_filter | 8 | Manages user-facing BI assets like Looks and Dashboards. Use for creating, searching, or executing saved visualizations. Provides full lifecycle management for reporting content. | -| **looker-dev** | get_projects get_project_files get_project_file create_project_file update_project_file delete_project_file validate_project dev_mode | 8 | Focused on the developer workflow and LookML file management. Use for code changes, validation, and project exploration. Provides file-level CRUD operations and syntax checking. | -| **looker-ops** | health_pulse health_analyze health_vacuum get_connections get_connection_schemas get_connection_databases get_connection_tables get_connection_table_columns | 8 | Handles platform maintenance and database connection audits. Use for instance health checks or database schema discovery. Provides connectivity management and LookML cleanup suggestions. 
| - -**OneMCP Comparison:** Looker is unsupported by OneMCP due to no OP API - -### - -### **Firestore** - -| Proposed Toolset Name | Recommended Tools | Count | Toolset Description | -| :--------------------- | :--------------------------------------------------------------------------------------------- | :---- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| **firestore-data** | get_documents add_documents update_document delete_documents query_collection list_collections | 6 | Handles NoSQL document operations and collection hierarchy exploration. Use for CRUD tasks and data retrieval. Provides flexible document manipulation and structured querying. | -| **firestore-security** | get_rules validate_rules | 2 | Manages access control and security compliance. Use when auditing permissions or deploying new security logic. Provides rule retrieval and syntax validation. | - -**OneMCP Comparison:** - -- **Unique to OneMCP:** Field-level management (field_get, field_update), backup management (backup_get, backup_delete), and schema/insights tools. -- **Missing in Toolbox:** database creation, import_data, export_data, and backup_schedule management. - -### **Healthcare API** - -No changes required. Already use toolsets. - -**OneMCP Comparison:** - -- OneMCP does not currently list a specific Healthcare API toolset. MCP Toolbox provides a specialized competitive advantage here. 
- -### **Spanner** - -_Includes: GoogleSQL and PostgreSQL dialects._ - -| Proposed Toolset | Recommended Tool Names | -| :------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **spanner-data** (No changes required) | list_tables list_graphs execute_sql execute_dql_sql | -| **spanner-admin** (Future/Unplanned) | create_instance get_instance update_instance delete_instance list_instances create_database get_database update_schema drop_database get_operation_status | - -**OneMCP Comparison:** - -- **Unique to OneMCP:** Session management (create_session, commit). -- **Missing in Toolbox:** Admin/Lifecycle tools - -### **Dataplex** - -| Proposed Toolset | Recommended Tool Names | -| :------------------------------------------- | :------------------------------------------------------------------ | -| **dataplex-discovery** (No changes required) | search_entries lookup_entry search_aspect_types | -| **dataplex-quality** (Coming soon) | get_data_profile get_data_quality run_profile_scan run_quality_scan | - -**OneMCP Comparison:** - -- **Unique to OneMCP:** get_lineage_graph - -## **Additional Work** - -### **GitHub PR Check** - -We should add a test or a Gemini CLI review for keeping track of toolset sizes. - -### **Versioning Policy** - -We need to write a versioning guide for toolsets to answer questions such as: are toolset changes breaking changes? 
- -## **Appendix** - -### **3P Toolsets** - -The following toolsets are already compliant with size limits and require no structural changes: - -- **ClickHouse**: execute_sql, list_databases, list_tables -- **Elasticsearch**: execute_query -- **Neo4j**: execute_cypher, get_schema -- **Spark**: list_batches, get_batch, cancel_batch, create_pyspark_batch, create_spark_batch -- **SQLite**: execute_sql, list_tables -- **SingleStore / Snowflake / MindsDB / OceanBase**: execute_sql, list_tables - -| Current Name | Recommended Change | Reason | -| :------------------ | :------------------------ | :------------------------------- | -| mindsdb-execute-sql | execute_sql | Remove redundant prefix. | -| mindsdb-sql | execute_parameterized_sql | Remove prefix; increase clarity. | -| execute_esql_query | execute_query | Remove engine-specific acronyms. | -| fhir_patient_search | search_patients | Outcome-oriented naming. | - -### **No MCP Toolbox Support** - -#### **Dataform OneMCP** - -MCP Toolbox currently supports dataform-compile. - -| Proposed Toolset | Recommended Tool Names | -| :----------------- | :----------------------------------------------------------------------------------- | -| **dataform-repo** | create_repository, delete_repository, list_repositories | -| **dataform-files** | search_file, rename_file, write_file, delete_file, read_file, list_files_and_folders | -| **dataform-ops** | compile_pipeline, trigger_pipeline | - -#### **Bigtable OneMCP** - -MCP Toolbox currently supports bigtable-sql. 
- -| Proposed Toolset | Recommended Tool Names | -| :---------------- | :-------------------------------------------------------------------------- | -| **bigtable-data** | list_instances, get_instance_info, list_tables, get_table_info, execute_sql | - -[image1]: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABsAAAAbCAMAAAC6CgRnAAADAFBMVEX///+5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+a5Q+bLcuzYlvHcofPpxffu0fnGZuvTiu/hrfT79P7///+9T+jlufb26PzPfu7CW+ny3Prn2uy+msydaLKVW6t8NZjfzeb38/mMTqWEQp7OtNmldLitgb/WwN+DN6KLOauaPL+pQNKuQNiePcSHOKd/Np2mP87Gp9KxQdy1QuG2jsWiPsnx5/Xv5vKTOrXt0Pjq0PTmz+/bwuXXtuO2Td7q2/HizurFnNW+kNCoa8CWO7qPOrG4WN3NqdoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACsRY1SAAAAEHRSTlMAEECAoMDQMFCwcJDg8GAggbhKsQAAANBJREFUeF6NkjkOAjEMRR2GYYkbGs7B/S9Dj9yEYmiQELG/s8wMAn6RxHnyEjuBqk7YUr0IcwA5djZDBRpbEJVSZR+QwU1vh7Gkh9ncIraHmykzR05UTlOJmRFzO5u2vmckdKapmlkBIaNHcoqozS+Od6KZL/JZ6U/LI8KlW2AXWymCstuIeQRjJJpo17GbLodBzKgvBbvq8xYk58M4WEtU0qHSFzGHnlCbA6+IMgSV2s2ipLUka5t25DX4/d5m+23uv/4LrShq+ON/qhw7yHoD2LYrfWJ+HeEAAAAASUVORK5CYII= From 9a11991f17dd37fdd70fe8a801a2007d5ac4af73 Mon Sep 17 00:00:00 2001 From: Twisha Bansal <58483338+twishabansal@users.noreply.github.com> Date: Wed, 15 Apr 2026 11:16:44 +0530 Subject: [PATCH 3/4] Delete skills-gen/skill-support.md --- skills-gen/skill-support.md | 74 
------------------------------------- 1 file changed, 74 deletions(-) delete mode 100644 skills-gen/skill-support.md diff --git a/skills-gen/skill-support.md b/skills-gen/skill-support.md deleted file mode 100644 index 8a8f723..0000000 --- a/skills-gen/skill-support.md +++ /dev/null @@ -1,74 +0,0 @@ -You're a senior software engineer who's working to support skills in extensions for Google Data Cloud. - -To support that create the following PRs in your repo. For each PR, first make the desired changes and then create a PR in the github repo. Please ensure that the PRs have good branch names as well as PR titles and descriptions. Directly create PRs in the repo. DO NOT CREATE ANY FORKS. All PRs should be created in a draft state. - -# PR1: Add support for skills - -1. Generate new skills using the latest toolbox binary (v1.1.0). Toolbox installation command: - -```bash -export VERSION=1.1.0 -curl -L -o toolbox2 https://storage.googleapis.com/mcp-toolbox-for-databases/v$VERSION/darwin/arm64/toolbox -chmod +x toolbox2 -``` - -Then set the required env vars for configuration. You can -find those in the configuration section in the readme file. Eg. https://github.com/gemini-cli-extensions/alloydb?tab=readme-ov-file#configuration. - -Command example to generate skills for a toolset: - -```bash -./toolbox2 --prebuilt cloud-sql-postgres skills-generate --name "cloud-sql-postgres-health" --description "Use these skills when you need to audit database health, identify storage bloat, find invalid indexes, analyze table statistics, and manage maintenance configurations like autovacuum." --toolset=monitor --license-header "// Copyright 2026 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the \"License\"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an \"AS IS\" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License." --additional-notes="Note: The scripts automatically load the environment variables from various .env files. Do not ask the user to set vars unless skill executions fails due to env var absence." --additional-notes="Note: The scripts automatically load the environment variables from various .env files. Do not ask the user to set vars unless skill executions fails due to env var absence." -``` - -Write the commands for all tools in generate-skills-repo.sh script. Do not commit this file. - -Remember to generate skills for all toolsets mentioned in https://github.com/googleapis/mcp-toolbox/tree/main/internal/prebuiltconfigs/tools for the source. Eg. AlloyDB Source: https://github.com/googleapis/mcp-toolbox/blob/main/internal/prebuiltconfigs/tools/alloydb-postgres.yaml. - -Find list of sources and their tools grouping along with toolset descriptions [here](./db_groups.md). - -After skills generation, cross check the generated skills from the [source yaml files](https://github.com/googleapis/mcp-toolbox/tree/main/internal/prebuiltconfigs/tools) and the [document](./db_groups.md) provided above. - -Now, for all generated skills, replace skill descriptions from using tools -> skills. - -2. Add skills validation workflow. Eg: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/.github/workflows/skills-validate.yml and https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/.github/workflows/skills-validate-fallback.yml. - -3. Remove the MCP servers from the gemini-extension.json file. 
- -# PR2: Remove packaging workflow - -1. Remove the github package and upload assets workflow: .github/workflows/package-and-upload-assets.yml. - -# PR3: Add Claude code plugin config - -1. Replicate this PR: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/137. Ensure that the plugin.json user config is consistent with the env vars in the gemini-extension.json file. - -# PR4: Add Codex plugin config - -1. Replicate this PR: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/138. Ensure that the system prompt you're using here is consistent with the gemini-extension context file. - -# PR5: Auto update plugin versions using rp - -1. Replicate this PR: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/155. Also ensure that "README.md" is present in extra-files section for release-please. - -# PR6: Make skill docs changes: - -1. Find the context file in gemini-extension.json, for eg. CLOUD-SQL-POSTGRESQL.md for the CLOUD SQL repo: https://github.com/gemini-cli-extensions/cloud-sql-postgresql and make changes analogous to what was done in https://github.com/gemini-cli-extensions/cloud-sql-postgresql/pull/109/. - -2. Make changes to the README.md and DEVELOPER.md files. Use https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/DEVELOPER.md and https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/README.md on potential changes. - -3. Check for any "tools" mention in the repository that needs to be updated to skills. - -4. Ensure that you have covered updates on the README on how to use the extensions with Gemini CLI, Claude Code, Codex and Antigravity. Be very through with looking into this: https://github.com/gemini-cli-extensions/cloud-sql-postgresql/blob/main/README.md and making updates. Notice that this readme has a table of contents and collapsible sections for claude code etc. 
From 600b70bfa6b787577d53e3ed6e1b49106b54524b Mon Sep 17 00:00:00 2001 From: Twisha Bansal <58483338+twishabansal@users.noreply.github.com> Date: Wed, 15 Apr 2026 11:17:13 +0530 Subject: [PATCH 4/4] Delete .gitignore --- .gitignore | 1 - 1 file changed, 1 deletion(-) delete mode 100644 .gitignore diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 4f20322..0000000 --- a/.gitignore +++ /dev/null @@ -1 +0,0 @@ -skills-gen/creds.sh \ No newline at end of file