1313# limitations under the License.
1414
1515import argparse
16+ import glob
1617import json
1718import logging
1819import os
1920import re
21+ import shutil
2022import subprocess
2123import sys
22- import subprocess
2324from typing import Dict , List
2425
2526try :
2627 import synthtool
27- from synthtool import gcp
28+ from synthtool . languages import python_mono_repo
2829
2930 SYNTHTOOL_INSTALLED = True
3031 SYNTHTOOL_IMPORT_ERROR = None
31- except ImportError as e :
32+ except ImportError as e : # pragma: NO COVER
3233 SYNTHTOOL_IMPORT_ERROR = e
3334 SYNTHTOOL_INSTALLED = False
3435
@@ -90,7 +91,7 @@ def _determine_bazel_rule(api_path: str, source: str) -> str:
9091
9192 # This check is for a logical failure (no match), not a runtime exception.
9293 # It's good to keep it for clear error messaging.
93- if not match :
94+ if not match : # pragma: NO COVER
9495 raise ValueError (
9596 f"No Bazel rule with a name ending in '-py' found in { build_file_path } "
9697 )
@@ -136,7 +137,19 @@ def _build_bazel_target(bazel_rule: str, source: str):
136137 """
137138 logger .info (f"Executing build for rule: { bazel_rule } " )
138139 try :
139- command = ["bazelisk" , "build" , bazel_rule ]
140+ # We're using the prewarmed bazel cache from the docker image to speed up the bazelisk commands.
141+ # Previously built artifacts are stored in `/bazel_cache/_bazel_ubuntu/output_base` and will be
142+ # used to speed up the build. `disk_cache` is used as the 'remote cache' and is also prewarmed as part of
143+ # the docker image.
144+ # See https://bazel.build/remote/caching#disk-cache which explains using a file system as a 'remote cache'.
145+ command = [
146+ "bazelisk" ,
147+ "--output_base=/bazel_cache/_bazel_ubuntu/output_base" ,
148+ "build" ,
149+ "--disk_cache=/bazel_cache/_bazel_ubuntu/cache/repos" ,
150+ "--incompatible_strict_action_env" ,
151+ bazel_rule ,
152+ ]
140153 subprocess .run (
141154 command ,
142155 cwd = source ,
@@ -171,7 +184,14 @@ def _locate_and_extract_artifact(
171184 try :
172185 # 1. Find the bazel-bin output directory.
173186 logger .info ("Locating Bazel output directory..." )
174- info_command = ["bazelisk" , "info" , "bazel-bin" ]
187+ # Previously built artifacts are stored in `/bazel_cache/_bazel_ubuntu/output_base`.
188+ # See `--output_base` in `_build_bazel_target`
189+ info_command = [
190+ "bazelisk" ,
191+ "--output_base=/bazel_cache/_bazel_ubuntu/output_base" ,
192+ "info" ,
193+ "bazel-bin" ,
194+ ]
175195 result = subprocess .run (
176196 info_command ,
177197 cwd = source ,
@@ -206,17 +226,87 @@ def _locate_and_extract_artifact(
206226 ) from e
207227
208228
def _run_post_processor(output: str, library_id: str):
    """Runs the synthtool post-processor on the generated library.

    Args:
        output(str): Path to the directory in the container where code
            should be generated.
        library_id(str): The library id to be used for post processing.

    Raises:
        ImportError: If synthtool failed to import at module load time
            (re-raised from the captured ``SYNTHTOOL_IMPORT_ERROR``).
    """
    path_to_library = f"packages/{library_id}"
    logger.info("Running Python post-processor...")
    if not SYNTHTOOL_INSTALLED:
        raise SYNTHTOOL_IMPORT_ERROR  # pragma: NO COVER
    # owlbot_main expects to be invoked from the repository root, so switch
    # into `output` only for the duration of the call. Restoring the previous
    # working directory avoids leaking a process-wide cwd change, which would
    # break later relative-path operations (e.g. the `f"{output}/..."` cleanup
    # paths in handle_generate when `output` is a relative path).
    previous_cwd = os.getcwd()
    os.chdir(output)
    try:
        python_mono_repo.owlbot_main(path_to_library)
    finally:
        os.chdir(previous_cwd)
    logger.info("Python post-processor ran successfully.")
218246
219247
248+ def _copy_files_needed_for_post_processing (output : str , input : str , library_id : str ):
249+ """Copy files to the output directory whcih are needed during the post processing
250+ step, such as .repo-metadata.json and script/client-post-processing, using
251+ the input directory as the source.
252+
253+ Args:
254+ output(str): Path to the directory in the container where code
255+ should be generated.
256+ input(str): The path to the directory in the container
257+ which contains additional generator input.
258+ library_id(str): The library id to be used for post processing.
259+ """
260+
261+ path_to_library = f"packages/{ library_id } "
262+
263+ # We need to create these directories so that we can copy files necessary for post-processing.
264+ os .makedirs (f"{ output } /{ path_to_library } " )
265+ os .makedirs (f"{ output } /{ path_to_library } /scripts/client-post-processing" )
266+ shutil .copy (
267+ f"{ input } /{ path_to_library } /.repo-metadata.json" ,
268+ f"{ output } /{ path_to_library } /.repo-metadata.json" ,
269+ )
270+
271+ # copy post-procesing files
272+ for post_processing_file in glob .glob (f"{ input } /client-post-processing/*.yaml" ): # pragma: NO COVER
273+ with open (post_processing_file , "r" ) as post_processing :
274+ if f"{ path_to_library } /" in post_processing .read ():
275+ shutil .copy (
276+ post_processing_file ,
277+ f"{ output } /{ path_to_library } /scripts/client-post-processing" ,
278+ )
279+
280+
281+ def _clean_up_files_after_post_processing (output : str , library_id : str ):
282+ """
283+ Clean up files which should not be included in the generated client
284+
285+ Args:
286+ output(str): Path to the directory in the container where code
287+ should be generated.
288+ library_id(str): The library id to be used for post processing.
289+ """
290+ path_to_library = f"packages/{ library_id } "
291+ shutil .rmtree (f"{ output } /{ path_to_library } /.nox" )
292+ os .remove (f"{ output } /{ path_to_library } /CHANGELOG.md" )
293+ os .remove (f"{ output } /{ path_to_library } /docs/CHANGELOG.md" )
294+ os .remove (f"{ output } /{ path_to_library } /docs/README.rst" )
295+ for post_processing_file in glob .glob (
296+ f"{ output } /{ path_to_library } /scripts/client-post-processing/*.yaml"
297+ ): # pragma: NO COVER
298+ os .remove (post_processing_file )
299+ for gapic_version_file in glob .glob (
300+ f"{ output } /{ path_to_library } /**/gapic_version.py" , recursive = True
301+ ): # pragma: NO COVER
302+ os .remove (gapic_version_file )
303+ for snippet_metadata_file in glob .glob (
304+ f"{ output } /{ path_to_library } /samples/generated_samples/snippet_metadata*.json"
305+ ): # pragma: NO COVER
306+ os .remove (snippet_metadata_file )
307+ shutil .rmtree (f"{ output } /owl-bot-staging" )
308+
309+
220310def handle_generate (
221311 librarian : str = LIBRARIAN_DIR ,
222312 source : str = SOURCE_DIR ,
@@ -238,7 +328,7 @@ def handle_generate(
238328 API protos.
239329 output(str): Path to the directory in the container where code
240330 should be generated.
241- input(str): The path path to the directory in the container
331+ input(str): The path to the directory in the container
242332 which contains additional generator input.
243333
244334 Raises:
@@ -249,7 +339,6 @@ def handle_generate(
249339 # Read a generate-request.json file
250340 request_data = _read_json_file (f"{ librarian } /{ GENERATE_REQUEST_FILE } " )
251341 library_id = _get_library_id (request_data )
252-
253342 for api in request_data .get ("apis" , []):
254343 api_path = api .get ("path" )
255344 if api_path :
@@ -258,7 +347,15 @@ def handle_generate(
258347 _locate_and_extract_artifact (
259348 bazel_rule , library_id , source , output , api_path
260349 )
261- _run_post_processor (output , f"packages/{ library_id } " )
350+
351+ _copy_files_needed_for_post_processing (output , input , library_id )
352+ _run_post_processor (output , library_id )
353+ _clean_up_files_after_post_processing (output , library_id )
354+
355+ # Write the `generate-response.json` using `generate-request.json` as the source
356+ with open (f"{ librarian } /generate-response.json" , "w" ) as f :
357+ json .dump (request_data , f , indent = 4 )
358+ f .write ("\n " )
262359
263360 except Exception as e :
264361 raise ValueError ("Generation failed." ) from e
@@ -293,6 +390,7 @@ def _run_individual_session(nox_session: str, library_id: str):
293390 nox_session(str): The nox session to run
294391 library_id(str): The library id under test
295392 """
393+
296394 command = [
297395 "nox" ,
298396 "-s" ,
@@ -324,7 +422,7 @@ def handle_build(librarian: str = LIBRARIAN_DIR):
324422 logger .info ("'build' command executed." )
325423
326424
327- if __name__ == "__main__" :
425+ if __name__ == "__main__" : # pragma: NO COVER
328426 parser = argparse .ArgumentParser (description = "A simple CLI tool." )
329427 subparsers = parser .add_subparsers (
330428 dest = "command" , required = True , help = "Available commands"
@@ -373,7 +471,6 @@ def handle_build(librarian: str = LIBRARIAN_DIR):
373471 sys .exit (1 )
374472
375473 args = parser .parse_args ()
376- args .func ()
377474
378475 # Pass specific arguments to the handler functions for generate/build
379476 if args .command == "generate" :
0 commit comments