Skip to content

Commit df31327

Browse files
committed
revert generator and librarian
1 parent 6a5845d commit df31327

11 files changed

Lines changed: 368 additions & 247 deletions

File tree

.generator/Dockerfile

Lines changed: 17 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -40,27 +40,25 @@ RUN apt-get update && \
4040
&& apt-get clean && \
4141
rm -rf /var/lib/apt/lists/*
4242

43-
# `make altinstall` is used to prevent replacing the system's default python binary.
43+
ENV PYTHON_VERSION=3.14
44+
4445
# The full Python version, including the minor version, is needed for download/install
45-
# TODO(https://github.com/googleapis/librarian/issues/2945): Remove `3.13` when the linked issue is resolved.
46-
RUN for PYTHON_VERSION_WITH_MINOR in 3.13.9 3.14.0; do \
47-
wget https://www.python.org/ftp/python/${PYTHON_VERSION_WITH_MINOR}/Python-${PYTHON_VERSION_WITH_MINOR}.tgz && \
46+
ENV PYTHON_VERSION_WITH_MINOR=3.14.2
47+
48+
# `make altinstall` is used to prevent replacing the system's default python binary.
49+
RUN wget https://www.python.org/ftp/python/${PYTHON_VERSION_WITH_MINOR}/Python-${PYTHON_VERSION_WITH_MINOR}.tgz && \
4850
tar -xvf Python-${PYTHON_VERSION_WITH_MINOR}.tgz && \
4951
cd Python-${PYTHON_VERSION_WITH_MINOR} && \
5052
./configure --enable-optimizations --prefix=/usr/local && \
5153
make -j$(nproc) && \
5254
make altinstall && \
5355
cd / && \
54-
rm -rf Python-${PYTHON_VERSION_WITH_MINOR}* \
55-
; done
56+
rm -rf Python-${PYTHON_VERSION_WITH_MINOR}*
57+
5658

57-
# Install pip for each python version
58-
# TODO(https://github.com/googleapis/librarian/issues/2945): Remove `3.13` when the linked issue is resolved.
59-
RUN for PYTHON_VERSION in 3.13 3.14; do \
60-
wget --no-check-certificate -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' && \
59+
RUN wget --no-check-certificate -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' && \
6160
python${PYTHON_VERSION} /tmp/get-pip.py && \
62-
rm /tmp/get-pip.py \
63-
; done
61+
rm /tmp/get-pip.py
6462

6563
# Download/extract protoc
6664
RUN wget https://github.com/protocolbuffers/protobuf/releases/download/v25.3/protoc-25.3-linux-x86_64.zip
@@ -112,32 +110,20 @@ COPY --from=builder synthtool /synthtool
112110
COPY --from=builder /usr/local/bin/python${PYTHON_VERSION_DEFAULT} /usr/local/bin/
113111
COPY --from=builder /usr/local/lib/python${PYTHON_VERSION_DEFAULT} /usr/local/lib/python${PYTHON_VERSION_DEFAULT}
114112

115-
# TODO(https://github.com/googleapis/librarian/issues/2945): Remove `3.13` when the linked issue is resolved.
116-
COPY --from=builder /usr/local/bin/python3.13 /usr/local/bin
117-
COPY --from=builder /usr/local/lib/python3.13 /usr/local/lib/python3.13
118-
119113
# Set the working directory in the container.
120114
WORKDIR /app
121115

122-
# --- CACHE CHECKPOINT ---
123-
# Everything above this line (including the pip install below) will be cached
124-
# as long as 'requirements.in' doesn't change.
116+
# Install dependencies of the CLI such as click.
117+
# Install gapic-generator which is used to generate libraries.
118+
# Install nox which is used for running client library tests.
119+
# Install starlark-pyo3 which is used to parse BUILD.bazel files.
125120
COPY .generator/requirements.in .
126121
RUN python${PYTHON_VERSION_DEFAULT} -m pip install -r requirements.in
127-
128-
# --- LOCAL OVERRIDES START HERE ---
129-
# By putting these instructions last, only these steps re-run when you change code.
130-
131-
# 1. Overwrite the 'synthtool' directory with your local version
132-
# Note: This overwrites the version copied from 'builder' earlier.
133-
COPY synthtool /synthtool
134122
RUN python${PYTHON_VERSION_DEFAULT} -m pip install /synthtool
135123

136-
# 2. Copy and install your local 'gapic-generator-python'
137-
COPY gapic-generator-python /gapic-generator-python
138-
RUN python${PYTHON_VERSION_DEFAULT} -m pip install /gapic-generator-python
139-
140-
# --- FINAL CONFIGURATION ---
124+
# Install build which is used to get the metadata of package config files.
125+
COPY .generator/requirements.in .
126+
RUN python${PYTHON_VERSION_DEFAULT} -m pip install -r requirements.in
141127

142128
# Copy the CLI script into the container.
143129
COPY .generator/cli.py .
@@ -147,5 +133,4 @@ RUN chmod a+rx ./cli.py
147133
COPY .generator/parse_googleapis_content.py .
148134
RUN chmod a+rx ./parse_googleapis_content.py
149135

150-
ENV ENABLE_PERF_LOGS=1
151136
ENTRYPOINT ["python3.14", "./cli.py"]

.generator/cli.py

Lines changed: 42 additions & 115 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
import subprocess
2424
import sys
2525
import tempfile
26-
import time
2726
import yaml
2827
from datetime import date, datetime
2928
from functools import lru_cache
@@ -32,40 +31,6 @@
3231
import build.util
3332
import parse_googleapis_content
3433

35-
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
36-
37-
import functools
38-
39-
PERF_LOGGING_ENABLED = os.environ.get("ENABLE_PERF_LOGS") == "1"
40-
41-
if PERF_LOGGING_ENABLED:
42-
perf_logger = logging.getLogger("performance_metrics")
43-
perf_logger.setLevel(logging.INFO)
44-
perf_handler = logging.FileHandler("/tmp/performance_metrics.log", mode='w')
45-
perf_formatter = logging.Formatter('%(asctime)s | %(message)s', datefmt='%H:%M:%S')
46-
perf_handler.setFormatter(perf_formatter)
47-
perf_logger.addHandler(perf_handler)
48-
perf_logger.propagate = False
49-
50-
def track_time(func):
51-
"""
52-
Decorator. Usage: @track_time
53-
If logging is OFF, it returns the original function (Zero Overhead).
54-
If logging is ON, it wraps the function to measure execution time.
55-
"""
56-
if not PERF_LOGGING_ENABLED:
57-
return func
58-
59-
@functools.wraps(func)
60-
def wrapper(*args, **kwargs):
61-
start_time = time.perf_counter()
62-
try:
63-
return func(*args, **kwargs)
64-
finally:
65-
duration = time.perf_counter() - start_time
66-
perf_logger.info(f"{func.__name__:<30} | {duration:.4f} seconds")
67-
68-
return wrapper
6934

7035
try:
7136
import synthtool
@@ -325,9 +290,12 @@ def handle_configure(
325290
)
326291
prepared_config = _prepare_new_library_config(new_library_config)
327292

328-
# Create a `CHANGELOG.md` and `docs/CHANGELOG.md` file for the new library
293+
is_mono_repo = _is_mono_repo(input)
329294
library_id = _get_library_id(prepared_config)
330-
_create_new_changelog_for_library(library_id, output)
295+
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
296+
if not Path(f"{repo}/{path_to_library}").exists():
297+
# Create a `CHANGELOG.md` and `docs/CHANGELOG.md` file for the new library
298+
_create_new_changelog_for_library(library_id, output)
331299

332300
# Write the new library configuration to configure-response.json.
333301
_write_json_file(f"{librarian}/configure-response.json", prepared_config)
@@ -355,9 +323,8 @@ def _get_library_id(request_data: Dict) -> str:
355323
return library_id
356324

357325

358-
@track_time
359326
def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
360-
"""Runs the synthtool post-processor (templates) and Ruff formatter (lint/format).
327+
"""Runs the synthtool post-processor on the output directory.
361328
362329
Args:
363330
output(str): Path to the directory in the container where code
@@ -367,58 +334,25 @@ def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
367334
"""
368335
os.chdir(output)
369336
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
370-
371-
# 1. Run Synthtool (Templates & Fixers only)
372-
# Note: This relies on 'nox' being disabled in your environment (via run_fast.sh shim)
373-
# to avoid the slow formatting step inside owlbot.
374-
logger.info("Running Python post-processor (Templates & Fixers)...")
337+
logger.info("Running Python post-processor...")
375338
if SYNTHTOOL_INSTALLED:
376-
try:
377-
if is_mono_repo:
378-
python_mono_repo.owlbot_main(path_to_library)
339+
if is_mono_repo:
340+
python_mono_repo.owlbot_main(path_to_library)
341+
else:
342+
# Some repositories have customizations in `librarian.py`.
343+
# If this file exists, run those customizations instead of `owlbot_main`
344+
if Path(f"{output}/librarian.py").exists():
345+
subprocess.run(["python3.14", f"{output}/librarian.py"])
379346
else:
380-
# Handle custom librarian scripts if present
381-
if Path(f"{output}/librarian.py").exists():
382-
subprocess.run(["python3.14", f"{output}/librarian.py"])
383-
else:
384-
python.owlbot_main()
385-
except Exception as e:
386-
logger.warning(f"Synthtool warning (non-fatal): {e}")
387-
388-
# 2. Run RUFF (Fast Formatter & Import Sorter)
389-
# This replaces both 'isort' and 'black' and runs in < 1 second.
390-
# We hardcode flags here to match Black defaults so you don't need config files.
391-
# logger.info("🚀 Running Ruff (Fast Formatter)...")
392-
try:
393-
# STEP A: Fix Imports (like isort)
394-
subprocess.run(
395-
[
396-
"ruff", "check",
397-
"--select", "I", # Only run Import sorting rules
398-
"--fix", # Auto-fix them
399-
"--line-length=88", # Match Black default
400-
"--known-first-party=google", # Prevent 'google' moving to 3rd party block
401-
output
402-
],
403-
check=False,
404-
stdout=subprocess.DEVNULL,
405-
stderr=subprocess.DEVNULL
406-
)
347+
python.owlbot_main()
348+
else:
349+
raise SYNTHTOOL_IMPORT_ERROR # pragma: NO COVER
407350

408-
# STEP B: Format Code (like black)
409-
subprocess.run(
410-
[
411-
"ruff", "format",
412-
"--line-length=88", # Match Black default
413-
output
414-
],
415-
check=False,
416-
stdout=subprocess.DEVNULL,
417-
stderr=subprocess.DEVNULL
418-
)
419-
except FileNotFoundError:
420-
logger.warning("⚠️ Ruff binary not found. Code will be unformatted.")
421-
logger.warning(" Please run: pip install ruff")
351+
# If there is no noxfile, run `isort` and `black` on the output.
352+
# This is required for proto-only libraries which are not GAPIC.
353+
if not Path(f"{output}/{path_to_library}/noxfile.py").exists():
354+
subprocess.run(["isort", output])
355+
subprocess.run(["black", output])
422356

423357
logger.info("Python post-processor ran successfully.")
424358

@@ -458,7 +392,6 @@ def _add_header_to_files(directory: str) -> None:
458392
f.writelines(lines)
459393

460394

461-
@track_time
462395
def _copy_files_needed_for_post_processing(
463396
output: str, input: str, library_id: str, is_mono_repo: bool
464397
):
@@ -480,13 +413,19 @@ def _copy_files_needed_for_post_processing(
480413
destination_dir = f"{output}/{path_to_library}"
481414

482415
if Path(source_dir).exists():
483-
shutil.copytree(
484-
source_dir,
485-
destination_dir,
486-
dirs_exist_ok=True,
487-
)
488-
# Apply headers only to the generator-input files copied above.
489-
_add_header_to_files(destination_dir)
416+
with tempfile.TemporaryDirectory() as tmp_dir:
417+
shutil.copytree(
418+
source_dir,
419+
tmp_dir,
420+
dirs_exist_ok=True,
421+
)
422+
# Apply headers only to the generator-input files copied above.
423+
_add_header_to_files(tmp_dir)
424+
shutil.copytree(
425+
tmp_dir,
426+
destination_dir,
427+
dirs_exist_ok=True,
428+
)
490429

491430
# We need to create these directories so that we can copy files necessary for post-processing.
492431
os.makedirs(
@@ -505,7 +444,6 @@ def _copy_files_needed_for_post_processing(
505444
)
506445

507446

508-
@track_time
509447
def _clean_up_files_after_post_processing(
510448
output: str, library_id: str, is_mono_repo: bool
511449
):
@@ -652,7 +590,6 @@ def _get_repo_name_from_repo_metadata(base: str, library_id: str, is_mono_repo:
652590
return repo_name
653591

654592

655-
@track_time
656593
def _generate_repo_metadata_file(
657594
output: str, library_id: str, source: str, apis: List[Dict], is_mono_repo: bool
658595
):
@@ -694,7 +631,6 @@ def _generate_repo_metadata_file(
694631
_write_json_file(output_repo_metadata, metadata_content)
695632

696633

697-
@track_time
698634
def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
699635
"""Copies the README.rst file for a generated library to docs/README.rst.
700636
@@ -736,7 +672,6 @@ def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
736672
f.write(content)
737673

738674

739-
@track_time
740675
def handle_generate(
741676
librarian: str = LIBRARIAN_DIR,
742677
source: str = SOURCE_DIR,
@@ -785,7 +720,6 @@ def handle_generate(
785720
_run_post_processor(output, library_id, is_mono_repo)
786721
_copy_readme_to_docs(output, library_id, is_mono_repo)
787722
_clean_up_files_after_post_processing(output, library_id, is_mono_repo)
788-
789723
except Exception as e:
790724
raise ValueError("Generation failed.") from e
791725
logger.info("'generate' command executed.")
@@ -999,7 +933,6 @@ def _stage_gapic_library(tmp_dir: str, staging_dir: str) -> None:
999933
shutil.copytree(tmp_dir, staging_dir, dirs_exist_ok=True)
1000934

1001935

1002-
@track_time
1003936
def _generate_api(
1004937
api_path: str,
1005938
library_id: str,
@@ -1062,18 +995,12 @@ def _run_nox_sessions(library_id: str, repo: str, is_mono_repo: bool):
1062995
the config.yaml.
1063996
is_mono_repo(bool): True if the current repository is a mono-repo.
1064997
"""
1065-
path_to_library = f"{repo}/packages/{library_id}" if is_mono_repo else repo
1066-
_python_314_supported = Path(
1067-
f"{path_to_library}/testing/constraints-3.14.txt"
1068-
).exists()
1069-
1070-
if _python_314_supported:
1071-
session_runtime = "3.14"
1072-
else:
1073-
session_runtime = "3.13"
1074-
998+
session_runtime = "3.14"
999+
# TODO(https://github.com/googleapis/google-cloud-python/issues/14992): Switch the protobuf
1000+
# implementation back to upb once we identify the root cause of the crash that occurs during testing.
1001+
# It's not trivial to debug this since it only happens in cloud build.
10751002
sessions = [
1076-
f"unit-{session_runtime}(protobuf_implementation='upb')",
1003+
f"unit-{session_runtime}(protobuf_implementation='python')",
10771004
]
10781005
current_session = None
10791006
try:
@@ -1112,7 +1039,8 @@ def _run_individual_session(
11121039
"-f",
11131040
f"{library_path}/noxfile.py",
11141041
]
1115-
result = subprocess.run(command, text=True, check=True, timeout=600)
1042+
# TODO(#14992): Revert to 600 seconds (10 minutes) after debugging is complete.
1043+
result = subprocess.run(command, text=True, check=True, timeout=1200)
11161044
logger.info(result)
11171045

11181046

@@ -1820,7 +1748,6 @@ def handle_release_stage(
18201748
output=args.output,
18211749
input=args.input,
18221750
)
1823-
18241751
elif args.command == "build":
18251752
args.func(librarian=args.librarian, repo=args.repo)
18261753
elif args.command == "release-stage":

.generator/requirements.in

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
click
2-
gapic-generator==1.28.3 # format generated samples
2+
gapic-generator==1.30.3 # Fix mypy checks https://github.com/googleapis/gapic-generator-python/pull/2520
33
nox
44
starlark-pyo3>=2025.1
55
build
6-
ruff
6+
black==23.7.0
7+
isort==5.11.0

0 commit comments

Comments (0)