refactor: fix invalid escape sequences
Signed-off-by: Kevin James <[email protected]>
TheKevJames committed Sep 10, 2024
1 parent 1cded35 commit e2e7702
Showing 19 changed files with 46 additions and 40 deletions.
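For context on the fix itself: in a regular Python string literal, a backslash before a character that is not a recognized escape (such as \d or \/) is an invalid escape sequence. flake8 flags it as W605, and CPython raises a DeprecationWarning (a SyntaxWarning since 3.12, slated to become an error in a future release). Prefixing the literal with r makes it a raw string, so the backslash reaches the regex engine unchanged. A minimal sketch, not taken from this repository:

import re

# pattern = '(\d+)'   # W605: '\d' is not a recognized string escape
pattern = r'(\d+)'    # raw string: backslash is kept literally, no warning

# Unrecognized escapes are currently passed through, so both spellings
# yield the same two characters (backslash + 'd') and identical behavior:
assert re.match(pattern, '123').group(1) == '123'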
6 changes: 6 additions & 0 deletions .pre-commit-config.yaml
@@ -15,6 +15,12 @@ repos:
- id: double-quote-string-fixer
- id: no-commit-to-branch
args: [--branch, master]
+- repo: https://github.com/PyCQA/flake8
+  rev: 7.1.1
+  hooks:
+    - id: flake8
+      args:
+        - --select=W605
# required formatting jobs (run these last)

# add comment "noqa" to ignore an import that should not be removed
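The new hook passes --select=W605, which restricts flake8 to reporting only invalid-escape-sequence violations rather than enforcing its full rule set. A hypothetical file that would trip the hook:

# bad.py (hypothetical): flake8 --select=W605 would report
# "W605 invalid escape sequence '\d'" on the commented line.
# version_re = 'v(\d+)'   # flagged
version_re = r'v(\d+)'    # fixed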
2 changes: 1 addition & 1 deletion backend/src/apiserver/visualization/types/tfdv.py
@@ -72,7 +72,7 @@ def get_statistics_html(
# facets element and then remove it once we have appended the serialized proto
# string to the element. We do this to avoid any collision of ids when
# displaying multiple facets output in the notebook.
html_template = """<iframe id='facets-iframe' width="100%" height="500px"></iframe>
html_template = r"""<iframe id='facets-iframe' width="100%" height="500px"></iframe>
<script>
facets_iframe = document.getElementById('facets-iframe');
facets_html = '<script src="https://cdnjs.cloudflare.com/ajax/libs/webcomponentsjs/1.3.3/webcomponents-lite.js"><\/script><link rel="import" href="https://raw.githubusercontent.com/PAIR-code/facets/master/facets-dist/facets-jupyter.html"><facets-overview proto-input="protostr"></facets-overview>';
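Worth noting why the backslash exists here at all: the template writes <\/script> so the literal </script> text cannot prematurely terminate the enclosing script tag when the HTML is injected into the notebook. \/ is not a recognized Python escape, so the unprefixed string triggered W605 even though the rendered output was already correct; the r prefix keeps the value identical and silences the warning. A standalone illustration:

# Unknown escapes pass through, so both literals hold backslash + '/':
assert '<\\/script>' == r'<\/script>'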
2 changes: 1 addition & 1 deletion components/aws/athena/query/src/query.py
@@ -63,7 +63,7 @@ def query(client, query, database, output, workgroup=None):
"OutputLocation"
]
# could be multiple files?
filename = re.findall(".*\/(.*)", s3_path)[0]
filename = re.findall(r".*\/(.*)", s3_path)[0]
logging.info("S3 output file name %s", filename)
break
time.sleep(5)
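In the pattern itself the backslash is redundant, since / has no special meaning in Python regexes (escaping it is a habit carried over from languages with /.../ literals). The raw string keeps behavior identical while satisfying W605. A sketch with an assumed S3 path:

import re

s3_path = 's3://bucket/prefix/results.csv'  # illustrative value
# r'.*\/(.*)' is equivalent to '.*/(.*)': the greedy '.*' runs to the
# last slash, so the capture group is the trailing file name.
assert re.findall(r'.*\/(.*)', s3_path)[0] == 'results.csv'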
@@ -88,7 +88,7 @@ def write_to_artifact(executor_input, text):
# Add URI Prefix
# "https://[location]-aiplatform.googleapis.com/API_VERSION/": For AI Platform resource names, current version is defined in AIPLATFORM_API_VERSION.
if RESOURCE_NAME_PATTERN.match(text):
-location = re.findall('locations/([\w\-]+)', text)[0]
+location = re.findall(r'locations/([\w\-]+)', text)[0]
uri_with_prefix = f'https://{location}-aiplatform.googleapis.com/{AIPLATFORM_API_VERSION}/{text}'
metadata.update({'resourceName': text})

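As an aside, this pattern pulls the location segment out of an AI Platform resource name so it can be turned into a regional API URL. A rough illustration with a made-up resource name:

import re

text = 'projects/123/locations/us-central1/models/456'  # hypothetical
# '[\w\-]+' allows letters, digits, underscores and hyphens, which
# covers region identifiers such as 'us-central1'.
assert re.findall(r'locations/([\w\-]+)', text)[0] == 'us-central1'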
@@ -42,7 +42,7 @@ def hyperparameter_tuning_job(
project: str = _placeholders.PROJECT_ID_PLACEHOLDER,
):
# fmt: off
"""Creates a Vertex AI hyperparameter tuning job and waits for it to
r"""Creates a Vertex AI hyperparameter tuning job and waits for it to
complete.
See [more information](https://cloud.google.com/vertex-ai/docs/training/using-hyperparameter-tuning).
4 changes: 2 additions & 2 deletions proxy/get_proxy_url.py
@@ -40,7 +40,7 @@ def urls_for_zone(zone, location_to_urls_map):
...
}
"""
zone_match = re.match("((([a-z]+)-[a-z]+)\d+)-[a-z]", zone)
zone_match = re.match(r"((([a-z]+)-[a-z]+)\d+)-[a-z]", zone)
if not zone_match:
raise ValueError("Incorrect zone specified: {}".format(zone))

@@ -55,7 +55,7 @@ def urls_for_zone(zone, location_to_urls_map):
url for url in location_to_urls_map[region] if url not in urls
])

region_regex = re.compile("([a-z]+-[a-z]+)\d+")
region_regex = re.compile(r"([a-z]+-[a-z]+)\d+")
for location in location_to_urls_map:
region_match = region_regex.match(location)
if region_match and region_match.group(1) == approx_region:
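The nested capture groups in the zone pattern appear to carry the lookup logic: group 1 is the region, group 2 a coarser prefix used for approximate matching, group 3 the multi-region. A quick check with a sample zone (group semantics inferred from the surrounding code):

import re

m = re.match(r'((([a-z]+)-[a-z]+)\d+)-[a-z]', 'us-central1-a')  # sample zone
assert m.group(1) == 'us-central1'  # region
assert m.group(2) == 'us-central'   # region family, minus trailing digits
assert m.group(3) == 'us'           # multi-region prefix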
12 changes: 6 additions & 6 deletions sdk/python/kfp/compiler/compiler_test.py
@@ -4445,7 +4445,7 @@ def test_other_control_flow_instantiated_between_if_else_not_permitted(
self):
with self.assertRaisesRegex(
tasks_group.InvalidControlFlowException,
-'dsl\.Else can only be used following an upstream dsl\.If or dsl\.Elif\.'
+r'dsl\.Else can only be used following an upstream dsl\.If or dsl\.Elif\.'
):

@dsl.pipeline
@@ -4908,7 +4908,7 @@ def flip_coin_pipeline(execute_pipeline: bool):
def test_nested_under_condition_returned_raises(self):
with self.assertRaisesRegex(
compiler_utils.InvalidTopologyException,
-f'Pipeline outputs may only be returned from the top level of the pipeline function scope\. Got pipeline output dsl\.OneOf from within the control flow group dsl\.If\.'
+r'Pipeline outputs may only be returned from the top level of the pipeline function scope\. Got pipeline output dsl\.OneOf from within the control flow group dsl\.If\.'
):

@dsl.pipeline
@@ -4961,7 +4961,7 @@ def test_deeply_nested_returned_raises(self):

with self.assertRaisesRegex(
compiler_utils.InvalidTopologyException,
-f'Pipeline outputs may only be returned from the top level of the pipeline function scope\. Got pipeline output dsl\.OneOf from within the control flow group dsl\.ParallelFor\.'
+r'Pipeline outputs may only be returned from the top level of the pipeline function scope\. Got pipeline output dsl\.OneOf from within the control flow group dsl\.ParallelFor\.'
):

@dsl.pipeline
@@ -4983,7 +4983,7 @@ def test_consume_at_wrong_level(self):

with self.assertRaisesRegex(
compiler_utils.InvalidTopologyException,
-f'Illegal task dependency across DSL context managers\. A downstream task cannot depend on an upstream task within a dsl\.If context unless the downstream is within that context too\. Found task print-artifact which depends on upstream task condition-branches-5 within an uncommon dsl\.If context\.'
+r'Illegal task dependency across DSL context managers\. A downstream task cannot depend on an upstream task within a dsl\.If context unless the downstream is within that context too\. Found task print-artifact which depends on upstream task condition-branches-5 within an uncommon dsl\.If context\.'
):

@dsl.pipeline
@@ -5006,7 +5006,7 @@ def flip_coin_pipeline(execute_pipeline: bool):
def test_return_at_wrong_level(self):
with self.assertRaisesRegex(
compiler_utils.InvalidTopologyException,
-f'Pipeline outputs may only be returned from the top level of the pipeline function scope\. Got pipeline output dsl\.OneOf from within the control flow group dsl\.If\.'
+r'Pipeline outputs may only be returned from the top level of the pipeline function scope\. Got pipeline output dsl\.OneOf from within the control flow group dsl\.If\.'
):

@dsl.pipeline
@@ -5143,7 +5143,7 @@ def roll_die_pipeline(
def test_oneof_in_fstring(self):
with self.assertRaisesRegex(
NotImplementedError,
-f'dsl\.OneOf does not support string interpolation\.'):
+r'dsl\.OneOf does not support string interpolation\.'):

@dsl.pipeline
def roll_die_pipeline():
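These test literals changed prefix from f to r: none of them interpolated anything, and since assertRaisesRegex treats them as regular expressions, the \. escapes are regex escapes that made the non-raw f-strings trip W605. A small illustration of why the dot must be escaped:

import re

# With '\.' the pattern matches only a literal dot:
assert re.search(r'dsl\.If', 'use dsl.If instead')
assert not re.search(r'dsl\.If', 'use dslXIf instead')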
@@ -88,7 +88,7 @@ def _load_compiled_template(filename: str) -> Dict:

if 'container' in template:
template['container'] = json.loads(
re.sub("'kfp==(\d+).(\d+).(\d+)'", 'kfp',
re.sub(r"'kfp==(\d+).(\d+).(\d+)'", 'kfp',
json.dumps(template['container'])))

return workflow
@@ -154,8 +154,8 @@ def my_pipeline():
with self.assertRaisesRegex(
RuntimeError,
'Constructing ContainerOp instances directly is deprecated and not '
-'supported when compiling to v2 \(using v2 compiler or v1 compiler '
-'with V2_COMPATIBLE or V2_ENGINE mode\).'):
+r'supported when compiling to v2 \(using v2 compiler or v1 compiler '
+r'with V2_COMPATIBLE or V2_ENGINE mode\)\.'):
compiler.Compiler(
mode=v1dsl.PipelineExecutionMode.V2_COMPATIBLE).compile(
pipeline_func=my_pipeline, package_path='result.json')
@@ -58,7 +58,7 @@ def get_short_type_name(type_name: str) -> str:
Returns:
The short form type name or the original name if pattern doesn't match.
"""
-match = re.match('(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
+match = re.match(r'(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
if match:
return match.group('type')
else:
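The same get_short_type_name helper (and pattern) recurs in several modules touched by this commit. Roughly, it strips an optional typing. prefix and any subscript; a condensed, self-contained sketch of the behavior:

import re

def get_short_type_name(type_name: str) -> str:
    # Drop an optional 'typing.' prefix and an optional '[...]' subscript.
    match = re.match(r'(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
    return match.group('type') if match else type_name

assert get_short_type_name('typing.List[int]') == 'List'
assert get_short_type_name('Dict[str, str]') == 'Dict'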
@@ -1224,7 +1224,7 @@ def my_func(args: Sequence[int]):
task_factory = comp.create_component_from_func(my_func)
with self.assertRaisesRegex(
TypeError,
-'There are no registered serializers for type "(typing.)?Sequence(\[int\])?"'
+r'There are no registered serializers for type "(typing.)?Sequence(\[int\])?"'
):
self.helper_test_component_using_local_call(
task_factory, arguments={'args': [1, 2, 3]})
2 changes: 1 addition & 1 deletion sdk/python/kfp/deprecated/dsl/_ops_group.py
@@ -68,7 +68,7 @@ def _get_matching_opsgroup_already_in_pipeline(group_type, name):
if name is None:
return None
name_pattern = '^' + (group_type + '-' + name + '-').replace(
-'_', '-') + '[\d]+$'
+'_', '-') + r'[\d]+$'
for ops_group_already_in_pipeline in _pipeline.Pipeline.get_default_pipeline(
).groups:
import re
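Only the final fragment of the concatenated pattern contains a backslash, so only it needs the raw-string prefix; mixing raw and regular literals in one concatenation is fine. Illustrative, with hypothetical inputs:

import re

group_type, name = 'condition', 'check_status'  # hypothetical
name_pattern = '^' + (group_type + '-' + name + '-').replace('_', '-') + r'[\d]+$'
assert re.match(name_pattern, 'condition-check-status-3')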
2 changes: 1 addition & 1 deletion sdk/python/kfp/dsl/executor.py
@@ -400,7 +400,7 @@ def get_short_type_name(type_name: str) -> str:
Returns:
The short form type name or the original name if pattern doesn't match.
"""
-match = re.match('(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
+match = re.match(r'(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
return match['type'] if match else type_name


4 changes: 2 additions & 2 deletions sdk/python/kfp/dsl/for_loop.py
@@ -44,7 +44,7 @@ def _get_loop_item_type(type_name: str) -> Optional[str]:
Returns:
The collection item type or None if no match found.
"""
-match = re.match('(typing\.)?(?:\w+)(?:\[(?P<item_type>.+)\])', type_name)
+match = re.match(r'(typing\.)?(?:\w+)(?:\[(?P<item_type>.+)\])', type_name)
return match['item_type'].lstrip().rstrip() if match else None


@@ -64,7 +64,7 @@ def _get_subvar_type(type_name: str) -> Optional[str]:
The dictionary value type or None if no match found.
"""
match = re.match(
-'(typing\.)?(?:\w+)(?:\[\s*(?:\w+)\s*,\s*(?P<value_type>.+)\])',
+r'(typing\.)?(?:\w+)(?:\[\s*(?:\w+)\s*,\s*(?P<value_type>.+)\])',
type_name)
return match['value_type'].lstrip().rstrip() if match else None

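For reference, the two patterns above decompose parameterized type names: the first pulls the item type out of a subscript, the second the value type of a two-parameter subscript such as a Dict. A behavior sketch with assumed inputs:

import re

item = re.match(r'(typing\.)?(?:\w+)(?:\[(?P<item_type>.+)\])', 'List[int]')
assert item['item_type'] == 'int'

sub = re.match(
    r'(typing\.)?(?:\w+)(?:\[\s*(?:\w+)\s*,\s*(?P<value_type>.+)\])',
    'Dict[str, Artifact]')
assert sub['value_type'] == 'Artifact'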
14 changes: 7 additions & 7 deletions sdk/python/kfp/dsl/placeholders_test.py
@@ -299,7 +299,7 @@ def test_valid(self, placeholder_obj: placeholders.IfPresentPlaceholder,
def test_only_single_element_ifpresent_inside_concat_outer(self):
with self.assertRaisesRegex(
ValueError,
-f'Please use a single element for `then` and `else_` only\.'):
+r'Please use a single element for `then` and `else_` only\.'):
placeholders.ConcatPlaceholder([
'b',
placeholders.IfPresentPlaceholder(
@@ -309,7 +309,7 @@ def test_only_single_element_ifpresent_inside_concat_outer(self):
def test_only_single_element_ifpresent_inside_concat_recursive(self):
with self.assertRaisesRegex(
ValueError,
-f'Please use a single element for `then` and `else_` only\.'):
+r'Please use a single element for `then` and `else_` only\.'):
placeholders.ConcatPlaceholder([
'a',
placeholders.ConcatPlaceholder([
@@ -323,7 +323,7 @@ def test_only_single_element_ifpresent_inside_concat_recursive(self):

with self.assertRaisesRegex(
ValueError,
-f'Please use a single element for `then` and `else_` only\.'):
+r'Please use a single element for `then` and `else_` only\.'):
placeholders.ConcatPlaceholder([
'a',
placeholders.ConcatPlaceholder([
@@ -341,7 +341,7 @@ def test_only_single_element_in_nested_ifpresent_inside_concat(self):
def test_only_single_element_in_nested_ifpresent_inside_concat(self):
with self.assertRaisesRegex(
ValueError,
-f'Please use a single element for `then` and `else_` only\.'):
+r'Please use a single element for `then` and `else_` only\.'):
dsl.ConcatPlaceholder([
'my-prefix-',
dsl.IfPresentPlaceholder(
@@ -357,7 +357,7 @@ def test_recursive_nested_placeholder_validation_does_not_exit_when_first_valid_
self):
with self.assertRaisesRegex(
ValueError,
-f'Please use a single element for `then` and `else_` only\.'):
+r'Please use a single element for `then` and `else_` only\.'):
dsl.ConcatPlaceholder([
'my-prefix-',
dsl.IfPresentPlaceholder(
@@ -371,7 +371,7 @@ def test_only_single_element_in_nested_ifpresent_inside_concat_with_outer_ifpres
self):
with self.assertRaisesRegex(
ValueError,
-f'Please use a single element for `then` and `else_` only\.'):
+r'Please use a single element for `then` and `else_` only\.'):
dsl.IfPresentPlaceholder(
input_name='input_1',
then=dsl.ConcatPlaceholder([
@@ -386,7 +386,7 @@ def test_valid_then_but_invalid_else(self):
def test_valid_then_but_invalid_else(self):
with self.assertRaisesRegex(
ValueError,
-f'Please use a single element for `then` and `else_` only\.'):
+r'Please use a single element for `then` and `else_` only\.'):
dsl.ConcatPlaceholder([
'my-prefix-',
dsl.IfPresentPlaceholder(
2 changes: 1 addition & 1 deletion sdk/python/kfp/dsl/tasks_group_test.py
@@ -74,7 +74,7 @@ def foo() -> str:
def my_pipeline(string: str):
with self.assertWarnsRegex(
DeprecationWarning,
-'dsl\.Condition is deprecated\. Please use dsl\.If instead\.'
+r'dsl\.Condition is deprecated\. Please use dsl\.If instead\.'
):
with dsl.Condition(string == 'text'):
foo()
2 changes: 1 addition & 1 deletion sdk/python/kfp/dsl/types/type_annotations.py
@@ -227,7 +227,7 @@ def get_short_type_name(type_name: str) -> str:
Returns:
The short form type name or the original name if pattern doesn't match.
"""
-match = re.match('(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
+match = re.match(r'(typing\.)?(?P<type>\w+)(?:\[.+\])?', type_name)
return match['type'] if match else type_name


Expand Down
2 changes: 1 addition & 1 deletion sdk/python/kfp/local/config_test.py
@@ -70,7 +70,7 @@ def test_validate_success(self):
def test_validate_fail(self):
with self.assertRaisesRegex(
RuntimeError,
f"Local environment not initialized. Please run 'kfp\.local\.init\(\)' before executing tasks locally\."
r"Local environment not initialized. Please run 'kfp\.local\.init\(\)' before executing tasks locally\."
):
config.LocalExecutionConfig.validate()

8 changes: 4 additions & 4 deletions test/sample-test/sample_test_launcher.py
@@ -92,7 +92,7 @@ def _compile(self):
for file in list_of_files:
# matching by .py or .ipynb, there will be yaml ( compiled ) files in the folder.
# if you rerun the test suite twice, the test suite will fail
-m = re.match(self._test_name + '\.(py|ipynb)$', file)
+m = re.match(self._test_name + r'\.(py|ipynb)$', file)
if m:
file_name, ext_name = os.path.splitext(file)
if self._is_notebook is not None:
@@ -242,14 +242,14 @@ def __init__(self,
def _injection(self):
"""Sample-specific image injection into yaml file."""
subs = { # Tag can look like 1.0.0-rc.3, so we need both "-" and "." in the regex.
-'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-confusion-matrix:(\w+|[.-])+':
+r'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-confusion-matrix:(\w+|[.-])+':
self._local_confusionmatrix_image,
-'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-roc:(\w+|[.-])+':
+r'gcr\.io/ml-pipeline/ml-pipeline/ml-pipeline-local-roc:(\w+|[.-])+':
self._local_roc_image
}
if self._test_name == 'xgboost_training_cm':
subs.update({
-'gcr\.io/ml-pipeline/ml-pipeline-gcp:(\w|[.-])+':
+r'gcr\.io/ml-pipeline/ml-pipeline-gcp:(\w|[.-])+':
self._dataproc_gcp_image
})

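The comment in this hunk explains the character class: image tags such as 1.0.0-rc.3 mix word characters, dots and hyphens, hence the (\w+|[.-])+ alternation. A quick check against a hypothetical tag:

import re

pattern = r'gcr\.io/ml-pipeline/ml-pipeline-gcp:(\w|[.-])+'
assert re.match(pattern, 'gcr.io/ml-pipeline/ml-pipeline-gcp:1.0.0-rc.3')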
10 changes: 5 additions & 5 deletions test/sample-test/unittests/utils_tests.py
@@ -48,10 +48,10 @@ def tearDown(self) -> None:
def test_file_injection(self):
"""Test file_injection function."""
subs = {
-'gcr\.io/ml-pipeline/ml-pipeline-local-confusion-matrix:\w+':'gcr.io/ml-pipeline/LOCAL_CONFUSION_MATRIX_IMAGE',
-'gcr\.io/ml-pipeline/ml-pipeline-dataproc-analyze:\w+':'gcr.io/ml-pipeline/DATAPROC_ANALYZE_IMAGE',
-'gcr\.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:\w+':'gcr.io/ml-pipeline/DATAPROC_CREATE_IMAGE',
-'gcr\.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:\w+':'gcr.io/ml-pipeline/DATAPROC_DELETE_IMAGE',
+r'gcr\.io/ml-pipeline/ml-pipeline-local-confusion-matrix:\w+':'gcr.io/ml-pipeline/LOCAL_CONFUSION_MATRIX_IMAGE',
+r'gcr\.io/ml-pipeline/ml-pipeline-dataproc-analyze:\w+':'gcr.io/ml-pipeline/DATAPROC_ANALYZE_IMAGE',
+r'gcr\.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:\w+':'gcr.io/ml-pipeline/DATAPROC_CREATE_IMAGE',
+r'gcr\.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:\w+':'gcr.io/ml-pipeline/DATAPROC_DELETE_IMAGE',
}
utils.file_injection(
os.path.join(_WORK_DIR, 'test_file_injection.yaml'),
@@ -64,4 +64,4 @@ def test_file_injection(self):
with open(os.path.join(_WORK_DIR,
'test_file_injection.yaml'), 'r') as f:
injected = yaml.safe_load(f)
-self.assertEqual(golden, injected)
\ No newline at end of file
+self.assertEqual(golden, injected)
