From 1de93bd6a4adb8920472818c1fc24abe7c8cf164 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Mon, 19 Aug 2024 11:10:28 +0530 Subject: [PATCH 01/65] Add select records to step with random selection --- cumulusci/tasks/bulkdata/load.py | 16 ++++--- cumulusci/tasks/bulkdata/step.py | 72 +++++++++++++++++++++++++++++++- 2 files changed, 82 insertions(+), 6 deletions(-) diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index 4ae0dcf31a..51c222ee55 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -289,7 +289,12 @@ def _execute_step( self, step, self._stream_queried_data(mapping, local_ids, query) ) step.start() - step.load_records(self._stream_queried_data(mapping, local_ids, query)) + if mapping.action == DataOperationType.SELECT: + step.select_records( + self._stream_queried_data(mapping, local_ids, query) + ) + else: + step.load_records(self._stream_queried_data(mapping, local_ids, query)) step.end() # Process Job Results @@ -481,10 +486,11 @@ def _process_job_results(self, mapping, step, local_ids): """Get the job results and process the results. If we're raising for row-level errors, do so; if we're inserting, store the new Ids.""" - is_insert_or_upsert = mapping.action in ( + is_insert_upsert_or_select = mapping.action in ( DataOperationType.INSERT, DataOperationType.UPSERT, DataOperationType.ETL_UPSERT, + DataOperationType.SELECT, ) conn = self.session.connection() @@ -500,7 +506,7 @@ def _process_job_results(self, mapping, step, local_ids): break # If we know we have no successful inserts, don't attempt to persist Ids. # Do, however, drain the generator to get error-checking behavior. - if is_insert_or_upsert and ( + if is_insert_upsert_or_select and ( step.job_result.records_processed - step.job_result.total_row_errors ): table = self.metadata.tables[self.ID_TABLE_NAME] @@ -516,7 +522,7 @@ def _process_job_results(self, mapping, step, local_ids): # person account Contact records so lookups to # person account Contact records get populated downstream as expected. 
if ( - is_insert_or_upsert + is_insert_upsert_or_select and mapping.sf_object == "Contact" and self._can_load_person_accounts(mapping) ): @@ -531,7 +537,7 @@ def _process_job_results(self, mapping, step, local_ids): ), ) - if is_insert_or_upsert: + if is_insert_upsert_or_select: self.session.commit() def _generate_results_id_map(self, step, local_ids): diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index edcb62afbb..5da84930d2 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -15,6 +15,9 @@ from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.core.utils import process_bool_arg +from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( + DEFAULT_DECLARATIONS, +) from cumulusci.tasks.bulkdata.utils import iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict from cumulusci.utils.xml import lxml_parse_string @@ -36,6 +39,7 @@ class DataOperationType(StrEnum): UPSERT = "upsert" ETL_UPSERT = "etl_upsert" SMART_UPSERT = "smart_upsert" # currently undocumented + SELECT = "select" class DataApi(StrEnum): @@ -320,6 +324,11 @@ def get_prev_record_values(self, records): """Get the previous records values in case of UPSERT and UPDATE to prepare for rollback""" pass + @abstractmethod + def select_records(self, records): + """Select matching records from the target org for the supplied row iterator.""" + pass + @abstractmethod def load_records(self, records): """Perform the requested DML operation on the supplied row iterator.""" @@ -424,6 +433,9 @@ def load_records(self, records): self.context.logger.info(f"Uploading batch {count + 1}") self.batch_ids.append(self.bulk.post_batch(self.job_id, iter(csv_batch))) + def select_records(self, records): + return super().select_records(records) + def _batch(self, records, n, char_limit=10000000): """Given an iterator of records, yields batches of records serialized in .csv format. @@ -631,6 +643,64 @@ def load_records(self, records): row_errors, ) + def select_records(self, records): + """Executes a SOQL query to select records and adds them to results""" + self.results = [] + num_records = sum(1 for _ in records) + selected_records = self.random_selection(num_records) + self.results.extend(selected_records) + self.job_result = DataOperationJobResult( + DataOperationStatus.SUCCESS + if not len(selected_records) + else DataOperationStatus.JOB_FAILURE, + [], + len(self.results), + 0, + ) + + def random_selection(self, num_records): + try: + # Get the WHERE clause from DEFAULT_DECLARATIONS if available + declaration = DEFAULT_DECLARATIONS.get(self.sobject) + if declaration: + where_clause = declaration.where + else: + where_clause = None # Explicitly set to None if not found + # Construct the query with the WHERE clause (if it exists) + query = f"SELECT Id FROM {self.sobject}" + if where_clause: + query += f" WHERE {where_clause}" + query += f" LIMIT {num_records}" + query_results = self.sf.query(query) + + # Handle case where query returns 0 records + if not query_results["records"]: + error_message = ( + f"No records found for {self.sobject} in the target org."
+ ) + self.logger.error(error_message) + return [], error_message + + # Add 'success: True' to each record to emulate records have been inserted + selected_records = [ + {"success": True, "id": record["Id"]} + for record in query_results["records"] + ] + + # If fewer records than requested, repeat existing records to match num_records + if len(selected_records) < num_records: + original_records = selected_records.copy() + while len(selected_records) < num_records: + selected_records.extend(original_records) + selected_records = selected_records[:num_records] + + return selected_records + + except Exception as e: + error_message = f"Error executing SOQL query for {self.sobject}: {e}" + self.logger.error(error_message) + return [], error_message + def get_results(self): """Return a generator of DataOperationResult objects.""" @@ -646,7 +716,7 @@ def _convert(res): if self.operation == DataOperationType.INSERT: created = True - elif self.operation == DataOperationType.UPDATE: + elif self.operation in [DataOperationType.UPDATE, DataOperationType.SELECT]: created = False else: created = res.get("created") From 95d6414749268de50b4d2e6d2ffb62bbfa8d1700 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Mon, 19 Aug 2024 17:01:56 +0530 Subject: [PATCH 02/65] Move random selection strategy outside DML Operation Class --- .../extract_dataset_utils/extract_yml.py | 9 +- cumulusci/tasks/bulkdata/load.py | 2 + cumulusci/tasks/bulkdata/step.py | 241 +++++++++++++----- 3 files changed, 189 insertions(+), 63 deletions(-) diff --git a/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py b/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py index 95d6b9ff97..9679da5a1a 100644 --- a/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py +++ b/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py @@ -5,7 +5,6 @@ from pydantic import Field, validator from cumulusci.core.enums import StrEnum -from cumulusci.tasks.bulkdata.step import DataApi from cumulusci.utils.yaml.model_parser import CCIDictModel, HashableBaseModel object_decl = re.compile(r"objects\((\w+)\)", re.IGNORECASE) @@ -25,6 +24,14 @@ class SFFieldGroupTypes(StrEnum): required = "required" +class DataApi(StrEnum): + """Enum defining requested Salesforce data API for an operation.""" + + BULK = "bulk" + REST = "rest" + SMART = "smart" + + class ExtractDeclaration(HashableBaseModel): where: T.Optional[str] = None fields_: T.Union[T.List[str], str] = Field(["FIELDS(ALL)"], alias="fields") diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index 51c222ee55..d6adf1395a 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -341,6 +341,8 @@ def configure_step(self, mapping): self.check_simple_upsert(mapping) api_options["update_key"] = mapping.update_key[0] action = DataOperationType.UPSERT + elif mapping.action == DataOperationType.SELECT: + action = DataOperationType.QUERY else: action = mapping.action diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 5da84930d2..eb9d11023c 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -434,7 +434,60 @@ def load_records(self, records): self.batch_ids.append(self.bulk.post_batch(self.job_id, iter(csv_batch))) def select_records(self, records): - return super().select_records(records) + """Executes a SOQL query to select records and adds them to results""" + + self.select_results = [] # Store selected records + + # Count total number of records 
to fetch + total_num_records = sum(1 for _ in records) + + # Process in batches based on batch_size from api_options + for offset in range( + 0, total_num_records, self.api_options.get("batch_size", 500) + ): + # Calculate number of records to fetch in this batch + num_records = min( + self.api_options.get("batch_size", 500), total_num_records - offset + ) + + # Generate and execute SOQL query + query = random_generate_query(self.sobject, num_records) + self.batch_id = self.bulk.query(self.job_id, query) + self._wait_for_job(self.job_id) + + # Get and process query results + result_ids = self.bulk.get_query_batch_result_ids( + self.batch_id, job_id=self.job_id + ) + query_records = [] + for result_id in result_ids: + uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}" + with download_file(uri, self.bulk) as f: + reader = csv.reader(f) + self.headers = next(reader) + if "Records not found for this query" in self.headers: + break # Stop if no records found + for row in reader: + query_records.append([row[0]]) + + # Post-process the query results + selected_records, error_message = random_post_process( + query_records, num_records, self.sobject + ) + if error_message: + break # Stop if there's an error during post-processing + + self.select_results.extend(selected_records) + + # Update job result based on selection outcome + self.job_result = DataOperationJobResult( + DataOperationStatus.SUCCESS + if len(self.select_results) + else DataOperationStatus.JOB_FAILURE, + [error_message] if error_message else [], + len(self.select_results), + 0, + ) def _batch(self, records, n, char_limit=10000000): """Given an iterator of records, yields batches of @@ -484,6 +537,29 @@ def _serialize_csv_record(self, record): return serialized def get_results(self): + """ + Retrieves and processes the results of a Bulk API operation. 
+ """ + + if self.operation is DataOperationType.QUERY: + yield from self._get_query_results() + else: + yield from self._get_batch_results() + + def _get_query_results(self): + """Handles results for QUERY (select) operations""" + for row in self.select_results: + success = process_bool_arg(row["success"]) + created = process_bool_arg(row["created"]) + yield DataOperationResult( + row["id"] if success else None, + success, + None, + created, + ) + + def _get_batch_results(self): + """Handles results for other DataOperationTypes (insert, update, etc.)""" for batch_id in self.batch_ids: try: results_url = ( @@ -493,24 +569,28 @@ def get_results(self): # to avoid the server dropping connections with download_file(results_url, self.bulk) as f: self.logger.info(f"Downloaded results for batch {batch_id}") + yield from self._parse_batch_results(f) - reader = csv.reader(f) - next(reader) # skip header - - for row in reader: - success = process_bool_arg(row[1]) - created = process_bool_arg(row[2]) - yield DataOperationResult( - row[0] if success else None, - success, - row[3] if not success else None, - created, - ) except Exception as e: raise BulkDataException( f"Failed to download results for batch {batch_id} ({str(e)})" ) + def _parse_batch_results(self, f): + """Parses batch results from the downloaded file""" + reader = csv.reader(f) + next(reader) # Skip header row + + for row in reader: + success = process_bool_arg(row[1]) + created = process_bool_arg(row[2]) + yield DataOperationResult( + row[0] if success else None, + success, + row[3] if not success else None, + created, + ) + class RestApiDmlOperation(BaseDmlOperation): """Operation class for all DML operations run using the REST API.""" @@ -645,62 +725,55 @@ def load_records(self, records): def select_records(self, records): """Executes a SOQL query to select records and adds them to results""" + + def convert(rec, fields): + """Helper function to convert record values to strings, handling None values""" + return [str(rec[f]) if rec[f] is not None else "" for f in fields] + self.results = [] - num_records = sum(1 for _ in records) - selected_records = self.random_selection(num_records) - self.results.extend(selected_records) + # Count the number of records to fetch + total_num_records = sum(1 for _ in records) + + # Process in batches + for offset in range(0, total_num_records, self.api_options.get("batch_size")): + num_records = min( + self.api_options.get("batch_size"), total_num_records - offset + ) + # Generate the SOQL query with and LIMIT + query = random_generate_query(self.sobject, num_records) + + # Execute the query and extract results + response = self.sf.query(query) + # Extract and convert 'Id' fields from the query results + query_records = list(convert(rec, ["Id"]) for rec in response["records"]) + # Handle pagination if there are more records within this batch + while not response["done"]: + response = self.sf.query_more( + response["nextRecordsUrl"], identifier_is_url=True + ) + query_records.extend( + list(convert(rec, ["Id"]) for rec in response["records"]) + ) + + # Post-process the query results for this batch + selected_records, error_message = random_post_process( + query_records, num_records, self.sobject + ) + if error_message: + break + # Add selected records from this batch to the overall results + self.results.extend(selected_records) + + # Update the job result based on the overall selection outcome self.job_result = DataOperationJobResult( DataOperationStatus.SUCCESS - if not len(selected_records) + if 
len(self.results) # Check the overall results length else DataOperationStatus.JOB_FAILURE, - [], + [error_message] if error_message else [], len(self.results), 0, ) - def random_selection(self, num_records): - try: - # Get the WHERE clause from DEFAULT_DECLARATIONS if available - declaration = DEFAULT_DECLARATIONS.get(self.sobject) - if declaration: - where_clause = declaration.where - else: - where_clause = None # Explicitly set to None if not found - # Construct the query with the WHERE clause (if it exists) - query = f"SELECT Id FROM {self.sobject}" - if where_clause: - query += f" WHERE {where_clause}" - query += f" LIMIT {num_records}" - query_results = self.sf.query(query) - - # Handle case where query returns 0 records - if not query_results["records"]: - error_message = ( - f"No records found for {self.sobject} in the target org." - ) - self.logger.error(error_message) - return [], error_message - - # Add 'success: True' to each record to emulate records have been inserted - selected_records = [ - {"success": True, "id": record["Id"]} - for record in query_results["records"] - ] - - # If fewer records than requested, repeat existing records to match num_records - if len(selected_records) < num_records: - original_records = selected_records.copy() - while len(selected_records) < num_records: - selected_records.extend(original_records) - selected_records = selected_records[:num_records] - - return selected_records - - except Exception as e: - error_message = f"Error executing SOQL query for {self.sobject}: {e}" - self.logger.error(error_message) - return [], error_message - def get_results(self): """Return a generator of DataOperationResult objects.""" @@ -716,7 +789,7 @@ def _convert(res): if self.operation == DataOperationType.INSERT: created = True - elif self.operation in [DataOperationType.UPDATE, DataOperationType.SELECT]: + elif self.operation == DataOperationType.UPDATE: created = False else: created = res.get("created") @@ -816,3 +889,47 @@ def get_dml_operation( context=context, fields=fields, ) + + +def random_generate_query(sobject: str, num_records: float) -> str: + """Generates the SOQL query for the random selection strategy""" + # Get the WHERE clause from DEFAULT_DECLARATIONS if available + declaration = DEFAULT_DECLARATIONS.get(sobject) + if declaration: + where_clause = declaration.where + else: + where_clause = None + # Construct the query with the WHERE clause (if it exists) + query = f"SELECT Id FROM {sobject}" + if where_clause: + query += f" WHERE {where_clause}" + query += f" LIMIT {num_records}" + + return query + + +def random_post_process(records, num_records: float, sobject: str): + """Processes the query results for the random selection strategy""" + try: + # Handle case where query returns 0 records + if not records: + error_message = f"No records found for {sobject} in the target org." 
+ return [], error_message + + # Add 'success: True' to each record to emulate records have been inserted + selected_records = [ + {"id": record[0], "success": True, "created": False} for record in records + ] + + # If fewer records than requested, repeat existing records to match num_records + if len(selected_records) < num_records: + original_records = selected_records.copy() + while len(selected_records) < num_records: + selected_records.extend(original_records) + selected_records = selected_records[:num_records] + + return selected_records, None # Return selected records and None for error + + except Exception as e: + error_message = f"Error processing query results for {sobject}: {e}" + return [], error_message From 0fca3f4ddb4533552bb738fd4d2004191df4ce8a Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Mon, 19 Aug 2024 17:36:21 +0530 Subject: [PATCH 03/65] Separate select related functions into select_utils file --- cumulusci/tasks/bulkdata/select_utils.py | 54 ++++++++++++++ cumulusci/tasks/bulkdata/step.py | 95 ++++++++++-------------- datasets/mapping.yml | 1 + 3 files changed, 95 insertions(+), 55 deletions(-) create mode 100644 cumulusci/tasks/bulkdata/select_utils.py diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py new file mode 100644 index 0000000000..b712970035 --- /dev/null +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -0,0 +1,54 @@ +from cumulusci.core.enums import StrEnum +from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( + DEFAULT_DECLARATIONS, +) + + +class SelectStrategy(StrEnum): + """Enum defining the different selection strategies requested.""" + + RANDOM = "random" + + +def random_generate_query(sobject: str, num_records: float): + """Generates the SOQL query for the random selection strategy""" + # Get the WHERE clause from DEFAULT_DECLARATIONS if available + declaration = DEFAULT_DECLARATIONS.get(sobject) + if declaration: + where_clause = declaration.where + else: + where_clause = None + # Construct the query with the WHERE clause (if it exists) + query = f"SELECT Id FROM {sobject}" + if where_clause: + query += f" WHERE {where_clause}" + query += f" LIMIT {num_records}" + + return query, ["Id"] + + +def random_post_process(records, num_records: float, sobject: str): + """Processes the query results for the random selection strategy""" + try: + # Handle case where query returns 0 records + if not records: + error_message = f"No records found for {sobject} in the target org." 
+ return [], error_message + + # Add 'success: True' to each record to emulate records have been inserted + selected_records = [ + {"id": record[0], "success": True, "created": False} for record in records + ] + + # If fewer records than requested, repeat existing records to match num_records + if len(selected_records) < num_records: + original_records = selected_records.copy() + while len(selected_records) < num_records: + selected_records.extend(original_records) + selected_records = selected_records[:num_records] + + return selected_records, None # Return selected records and None for error + + except Exception as e: + error_message = f"Error processing query results for {sobject}: {e}" + return [], error_message diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index eb9d11023c..b0d4e31b44 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -15,8 +15,10 @@ from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.core.utils import process_bool_arg -from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( - DEFAULT_DECLARATIONS, +from cumulusci.tasks.bulkdata.select_utils import ( + SelectStrategy, + random_generate_query, + random_post_process, ) from cumulusci.tasks.bulkdata.utils import iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict @@ -347,7 +349,16 @@ def get_results(self): class BulkApiDmlOperation(BaseDmlOperation, BulkJobMixin): """Operation class for all DML operations run using the Bulk API.""" - def __init__(self, *, sobject, operation, api_options, context, fields): + def __init__( + self, + *, + sobject, + operation, + api_options, + context, + fields, + selection_strategy=SelectStrategy.RANDOM, + ): super().__init__( sobject=sobject, operation=operation, @@ -362,6 +373,10 @@ def __init__(self, *, sobject, operation, api_options, context, fields): self.csv_buff = io.StringIO(newline="") self.csv_writer = csv.writer(self.csv_buff, quoting=csv.QUOTE_ALL) + if selection_strategy is SelectStrategy.RANDOM: + self.select_generate_query = random_generate_query + self.select_post_process = random_post_process + def start(self): self.job_id = self.bulk.create_job( self.sobject, @@ -451,7 +466,7 @@ def select_records(self, records): ) # Generate and execute SOQL query - query = random_generate_query(self.sobject, num_records) + query, query_fields = self.select_generate_query(self.sobject, num_records) self.batch_id = self.bulk.query(self.job_id, query) self._wait_for_job(self.job_id) @@ -468,10 +483,10 @@ def select_records(self, records): if "Records not found for this query" in self.headers: break # Stop if no records found for row in reader: - query_records.append([row[0]]) + query_records.append([row[: len(query_fields)]]) # Post-process the query results - selected_records, error_message = random_post_process( + selected_records, error_message = self.select_post_process( query_records, num_records, self.sobject ) if error_message: @@ -595,7 +610,16 @@ def _parse_batch_results(self, f): class RestApiDmlOperation(BaseDmlOperation): """Operation class for all DML operations run using the REST API.""" - def __init__(self, *, sobject, operation, api_options, context, fields): + def __init__( + self, + *, + sobject, + operation, + api_options, + context, + fields, + selection_strategy=SelectStrategy.RANDOM, + ): super().__init__( sobject=sobject, operation=operation, @@ -617,6 +641,9 @@ def 
__init__(self, *, sobject, operation, api_options, context, fields): self.api_options["batch_size"] = min( self.api_options["batch_size"], MAX_REST_BATCH_SIZE ) + if selection_strategy is SelectStrategy.RANDOM: + self.select_generate_query = random_generate_query + self.select_post_process = random_post_process def _record_to_json(self, rec): result = dict(zip(self.fields, rec)) @@ -740,23 +767,25 @@ def convert(rec, fields): self.api_options.get("batch_size"), total_num_records - offset ) # Generate the SOQL query with the appropriate LIMIT - query = random_generate_query(self.sobject, num_records) + query, query_fields = self.select_generate_query(self.sobject, num_records) # Execute the query and extract results response = self.sf.query(query) # Extract and convert 'Id' fields from the query results - query_records = list(convert(rec, ["Id"]) for rec in response["records"]) + query_records = list( + convert(rec, query_fields) for rec in response["records"] + ) # Handle pagination if there are more records within this batch while not response["done"]: response = self.sf.query_more( response["nextRecordsUrl"], identifier_is_url=True ) query_records.extend( - list(convert(rec, ["Id"]) for rec in response["records"]) + list(convert(rec, query_fields) for rec in response["records"]) ) # Post-process the query results for this batch - selected_records, error_message = random_post_process( + selected_records, error_message = self.select_post_process( query_records, num_records, self.sobject ) if error_message: break @@ -889,47 +918,3 @@ def get_dml_operation( context=context, fields=fields, ) - - -def random_generate_query(sobject: str, num_records: float) -> str: - """Generates the SOQL query for the random selection strategy""" - # Get the WHERE clause from DEFAULT_DECLARATIONS if available - declaration = DEFAULT_DECLARATIONS.get(sobject) - if declaration: - where_clause = declaration.where - else: - where_clause = None - # Construct the query with the WHERE clause (if it exists) - query = f"SELECT Id FROM {sobject}" - if where_clause: - query += f" WHERE {where_clause}" - query += f" LIMIT {num_records}" - - return query - - -def random_post_process(records, num_records: float, sobject: str): - """Processes the query results for the random selection strategy""" - try: - # Handle case where query returns 0 records - if not records: - error_message = f"No records found for {sobject} in the target org."
- return [], error_message - - # Add 'success: True' to each record to emulate records have been inserted - selected_records = [ - {"id": record[0], "success": True, "created": False} for record in records - ] - - # If fewer records than requested, repeat existing records to match num_records - if len(selected_records) < num_records: - original_records = selected_records.copy() - while len(selected_records) < num_records: - selected_records.extend(original_records) - selected_records = selected_records[:num_records] - - return selected_records, None # Return selected records and None for error - - except Exception as e: - error_message = f"Error processing query results for {sobject}: {e}" - return [], error_message diff --git a/datasets/mapping.yml b/datasets/mapping.yml index ae7952b22c..838b8b4597 100644 --- a/datasets/mapping.yml +++ b/datasets/mapping.yml @@ -1,6 +1,7 @@ Account: sf_object: Account api: bulk + action: select fields: - Name - Description From 580a6e67961cfc70ef4a68e1af095353b9aabd88 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 20 Aug 2024 00:21:20 +0530 Subject: [PATCH 04/65] Add test cases for select_records functionality --- cumulusci/tasks/bulkdata/select_utils.py | 39 +-- cumulusci/tasks/bulkdata/step.py | 3 +- .../tasks/bulkdata/tests/test_select_utils.py | 63 ++++ cumulusci/tasks/bulkdata/tests/test_step.py | 309 ++++++++++++++++++ cumulusci/tasks/bulkdata/tests/utils.py | 3 + datasets/mapping.yml | 1 - 6 files changed, 394 insertions(+), 24 deletions(-) create mode 100644 cumulusci/tasks/bulkdata/tests/test_select_utils.py diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index b712970035..3521fa3c8e 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -29,26 +29,21 @@ def random_generate_query(sobject: str, num_records: float): def random_post_process(records, num_records: float, sobject: str): """Processes the query results for the random selection strategy""" - try: - # Handle case where query returns 0 records - if not records: - error_message = f"No records found for {sobject} in the target org." - return [], error_message - - # Add 'success: True' to each record to emulate records have been inserted - selected_records = [ - {"id": record[0], "success": True, "created": False} for record in records - ] - - # If fewer records than requested, repeat existing records to match num_records - if len(selected_records) < num_records: - original_records = selected_records.copy() - while len(selected_records) < num_records: - selected_records.extend(original_records) - selected_records = selected_records[:num_records] - - return selected_records, None # Return selected records and None for error - - except Exception as e: - error_message = f"Error processing query results for {sobject}: {e}" + # Handle case where query returns 0 records + if not records: + error_message = f"No records found for {sobject} in the target org." 
return [], error_message + + # Add 'success: True' to each record to emulate records have been inserted + selected_records = [ + {"id": record[0], "success": True, "created": False} for record in records + ] + + # If fewer records than requested, repeat existing records to match num_records + if len(selected_records) < num_records: + original_records = selected_records.copy() + while len(selected_records) < num_records: + selected_records.extend(original_records) + selected_records = selected_records[:num_records] + + return selected_records, None # Return selected records and None for error diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index b0d4e31b44..1844c4caeb 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -388,7 +388,8 @@ def start(self): def end(self): self.bulk.close_job(self.job_id) - self.job_result = self._wait_for_job(self.job_id) + if not self.job_result: + self.job_result = self._wait_for_job(self.job_id) def get_prev_record_values(self, records): """Get the previous values of the records based on the update key diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py new file mode 100644 index 0000000000..c649871217 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -0,0 +1,63 @@ +from cumulusci.tasks.bulkdata.select_utils import ( + random_generate_query, + random_post_process, +) + + +# Test Cases for random_generate_query +def test_random_generate_query_with_default_record_declaration(): + sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS + num_records = 5 + query, fields = random_generate_query(sobject, num_records) + + assert "WHERE" in query # Ensure WHERE clause is included + assert f"LIMIT {num_records}" in query + assert fields == ["Id"] + + +def test_random_generate_query_without_default_record_declaration(): + sobject = "Contact" # Assuming no declaration for this object + num_records = 3 + query, fields = random_generate_query(sobject, num_records) + + assert "WHERE" not in query # No WHERE clause should be present + assert f"LIMIT {num_records}" in query + assert fields == ["Id"] + + +# Test Cases for random_post_process +def test_random_post_process_with_records(): + records = [["001"], ["002"], ["003"]] + num_records = 3 + sobject = "Contact" + selected_records, error_message = random_post_process(records, num_records, sobject) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + assert all(record["id"] in ["001", "002", "003"] for record in selected_records) + + +def test_random_post_process_with_fewer_records(): + records = [["001"]] + num_records = 3 + sobject = "Opportunity" + selected_records, error_message = random_post_process(records, num_records, sobject) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + # Check if records are repeated to match num_records + assert selected_records.count({"id": "001", "success": True, "created": False}) == 3 + + +def test_random_post_process_with_no_records(): + records = [] + num_records = 2 + sobject = "Lead" + selected_records, error_message = random_post_process(records, num_records, sobject) + + 
assert selected_records == [] + assert error_message == f"No records found for {sobject} in the target org." diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index fc8cea7013..6459edb6d0 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -7,6 +7,7 @@ from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata.load import LoadData +from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import ( BulkApiDmlOperation, BulkApiQueryOperation, @@ -534,6 +535,104 @@ def test_get_prev_record_values(self): ) step.bulk.get_all_results_for_query_batch.assert_called_once_with("BATCH_ID") + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_random_strategy_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.RANDOM, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO( + """Id +003000000000001""" + ) + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id=["003000000000001"], success=True, error=None, created=False + ) + ) + == 3 + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_random_strategy_failure__no_records(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.RANDOM, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content indicating no records found + download_mock.return_value = io.StringIO("""Records not found for this query""") + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + 
+ # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." + ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 + def test_batch(self): context = mock.Mock() @@ -879,6 +978,216 @@ def test_get_prev_record_values(self): ) assert set(relevant_fields) == set(expected_relevant_fields) + @responses.activate + def test_select_records_random_strategy_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + ) + + results = { + "records": [ + {"Id": "003000000000001"}, + ], + "done": True, + } + step.sf.query = mock.Mock() + step.sf.query.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 + ) + + @responses.activate + def test_select_records_random_strategy_success__pagination(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + ) + + results = { + "records": [ + {"Id": "003000000000001"}, + ], + "done": False, + "nextRecordsUrl": "https://example.com", + } + results_more = { + "records": [ + {"Id": "003000000000002"}, + {"Id": "003000000000003"}, + ], + "done": True, + } + step.sf.query = mock.Mock() + step.sf.query.return_value = results + step.sf.query_more = mock.Mock() + step.sf.query_more.return_value = results_more + records = iter([["Test1"], ["Test2"], 
["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=False + ) + ) + == 1 + ) + + @responses.activate + def test_select_records_random_strategy_failure__no_records(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + ) + + results = {"records": [], "done": True} + step.sf.query = mock.Mock() + step.sf.query.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." 
+ ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 + @responses.activate def test_insert_dml_operation__boolean_conversion(self): mock_describe_calls() diff --git a/cumulusci/tasks/bulkdata/tests/utils.py b/cumulusci/tasks/bulkdata/tests/utils.py index 173f4c6122..c0db0f9515 100644 --- a/cumulusci/tasks/bulkdata/tests/utils.py +++ b/cumulusci/tasks/bulkdata/tests/utils.py @@ -98,6 +98,9 @@ def get_prev_record_values(self, records): def load_records(self, records): self.records.extend(records) + def select_records(self, records): + pass + def get_results(self): return iter(self.results) diff --git a/datasets/mapping.yml b/datasets/mapping.yml index 838b8b4597..ae7952b22c 100644 --- a/datasets/mapping.yml +++ b/datasets/mapping.yml @@ -1,7 +1,6 @@ Account: sf_object: Account api: bulk - action: select fields: - Name - Description From e230159ffefca688d881d9270a138ce7bda19b39 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 20 Aug 2024 00:25:30 +0530 Subject: [PATCH 05/65] Undo load changes for select record functionality --- cumulusci/tasks/bulkdata/load.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index d6adf1395a..4ae0dcf31a 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -289,12 +289,7 @@ def _execute_step( self, step, self._stream_queried_data(mapping, local_ids, query) ) step.start() - if mapping.action == DataOperationType.SELECT: - step.select_records( - self._stream_queried_data(mapping, local_ids, query) - ) - else: - step.load_records(self._stream_queried_data(mapping, local_ids, query)) + step.load_records(self._stream_queried_data(mapping, local_ids, query)) step.end() # Process Job Results @@ -341,8 +336,6 @@ def configure_step(self, mapping): self.check_simple_upsert(mapping) api_options["update_key"] = mapping.update_key[0] action = DataOperationType.UPSERT - elif mapping.action == DataOperationType.SELECT: - action = DataOperationType.QUERY else: action = mapping.action @@ -488,11 +481,10 @@ def _process_job_results(self, mapping, step, local_ids): """Get the job results and process the results. If we're raising for row-level errors, do so; if we're inserting, store the new Ids.""" - is_insert_upsert_or_select = mapping.action in ( + is_insert_or_upsert = mapping.action in ( DataOperationType.INSERT, DataOperationType.UPSERT, DataOperationType.ETL_UPSERT, - DataOperationType.SELECT, ) conn = self.session.connection() @@ -508,7 +500,7 @@ def _process_job_results(self, mapping, step, local_ids): break # If we know we have no successful inserts, don't attempt to persist Ids. # Do, however, drain the generator to get error-checking behavior. - if is_insert_upsert_or_select and ( + if is_insert_or_upsert and ( step.job_result.records_processed - step.job_result.total_row_errors ): table = self.metadata.tables[self.ID_TABLE_NAME] @@ -516,7 +508,7 @@ def _process_job_results(self, mapping, step, local_ids): # person account Contact records so lookups to # person account Contact records get populated downstream as expected.
if ( - is_insert_upsert_or_select + is_insert_or_upsert and mapping.sf_object == "Contact" and self._can_load_person_accounts(mapping) ): @@ -539,7 +531,7 @@ def _process_job_results(self, mapping, step, local_ids): ), ) - if is_insert_upsert_or_select: + if is_insert_or_upsert: self.session.commit() def _generate_results_id_map(self, step, local_ids): From b15945203dcd185bb07a294d439fe0c3ed5669eb Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Mon, 19 Aug 2024 11:10:28 +0530 Subject: [PATCH 06/65] Core Logic for Selecting Records from Target Org --- .../extract_dataset_utils/extract_yml.py | 9 +- cumulusci/tasks/bulkdata/select_utils.py | 49 +++ cumulusci/tasks/bulkdata/step.py | 203 +++++++++++- .../tasks/bulkdata/tests/test_select_utils.py | 63 ++++ cumulusci/tasks/bulkdata/tests/test_step.py | 309 ++++++++++++++++++ cumulusci/tasks/bulkdata/tests/utils.py | 3 + 6 files changed, 620 insertions(+), 16 deletions(-) create mode 100644 cumulusci/tasks/bulkdata/select_utils.py create mode 100644 cumulusci/tasks/bulkdata/tests/test_select_utils.py diff --git a/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py b/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py index 95d6b9ff97..9679da5a1a 100644 --- a/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py +++ b/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py @@ -5,7 +5,6 @@ from pydantic import Field, validator from cumulusci.core.enums import StrEnum -from cumulusci.tasks.bulkdata.step import DataApi from cumulusci.utils.yaml.model_parser import CCIDictModel, HashableBaseModel object_decl = re.compile(r"objects\((\w+)\)", re.IGNORECASE) @@ -25,6 +24,14 @@ class SFFieldGroupTypes(StrEnum): required = "required" +class DataApi(StrEnum): + """Enum defining requested Salesforce data API for an operation.""" + + BULK = "bulk" + REST = "rest" + SMART = "smart" + + class ExtractDeclaration(HashableBaseModel): where: T.Optional[str] = None fields_: T.Union[T.List[str], str] = Field(["FIELDS(ALL)"], alias="fields") diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py new file mode 100644 index 0000000000..3521fa3c8e --- /dev/null +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -0,0 +1,49 @@ +from cumulusci.core.enums import StrEnum +from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( + DEFAULT_DECLARATIONS, +) + + +class SelectStrategy(StrEnum): + """Enum defining the different selection strategies requested.""" + + RANDOM = "random" + + +def random_generate_query(sobject: str, num_records: float): + """Generates the SOQL query for the random selection strategy""" + # Get the WHERE clause from DEFAULT_DECLARATIONS if available + declaration = DEFAULT_DECLARATIONS.get(sobject) + if declaration: + where_clause = declaration.where + else: + where_clause = None + # Construct the query with the WHERE clause (if it exists) + query = f"SELECT Id FROM {sobject}" + if where_clause: + query += f" WHERE {where_clause}" + query += f" LIMIT {num_records}" + + return query, ["Id"] + + +def random_post_process(records, num_records: float, sobject: str): + """Processes the query results for the random selection strategy""" + # Handle case where query returns 0 records + if not records: + error_message = f"No records found for {sobject} in the target org." 
+ return [], error_message + + # Add 'success: True' to each record to emulate records have been inserted + selected_records = [ + {"id": record[0], "success": True, "created": False} for record in records + ] + + # If fewer records than requested, repeat existing records to match num_records + if len(selected_records) < num_records: + original_records = selected_records.copy() + while len(selected_records) < num_records: + selected_records.extend(original_records) + selected_records = selected_records[:num_records] + + return selected_records, None # Return selected records and None for error diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index edcb62afbb..1844c4caeb 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -15,6 +15,11 @@ from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.core.utils import process_bool_arg +from cumulusci.tasks.bulkdata.select_utils import ( + SelectStrategy, + random_generate_query, + random_post_process, +) from cumulusci.tasks.bulkdata.utils import iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict from cumulusci.utils.xml import lxml_parse_string @@ -36,6 +41,7 @@ class DataOperationType(StrEnum): UPSERT = "upsert" ETL_UPSERT = "etl_upsert" SMART_UPSERT = "smart_upsert" # currently undocumented + SELECT = "select" class DataApi(StrEnum): @@ -320,6 +326,11 @@ def get_prev_record_values(self, records): """Get the previous records values in case of UPSERT and UPDATE to prepare for rollback""" pass + @abstractmethod + def select_records(self, records): + """Perform the requested DML operation on the supplied row iterator.""" + pass + @abstractmethod def load_records(self, records): """Perform the requested DML operation on the supplied row iterator.""" @@ -338,7 +349,16 @@ def get_results(self): class BulkApiDmlOperation(BaseDmlOperation, BulkJobMixin): """Operation class for all DML operations run using the Bulk API.""" - def __init__(self, *, sobject, operation, api_options, context, fields): + def __init__( + self, + *, + sobject, + operation, + api_options, + context, + fields, + selection_strategy=SelectStrategy.RANDOM, + ): super().__init__( sobject=sobject, operation=operation, @@ -353,6 +373,10 @@ def __init__(self, *, sobject, operation, api_options, context, fields): self.csv_buff = io.StringIO(newline="") self.csv_writer = csv.writer(self.csv_buff, quoting=csv.QUOTE_ALL) + if selection_strategy is SelectStrategy.RANDOM: + self.select_generate_query = random_generate_query + self.select_post_process = random_post_process + def start(self): self.job_id = self.bulk.create_job( self.sobject, @@ -364,7 +388,8 @@ def start(self): def end(self): self.bulk.close_job(self.job_id) - self.job_result = self._wait_for_job(self.job_id) + if not self.job_result: + self.job_result = self._wait_for_job(self.job_id) def get_prev_record_values(self, records): """Get the previous values of the records based on the update key @@ -424,6 +449,62 @@ def load_records(self, records): self.context.logger.info(f"Uploading batch {count + 1}") self.batch_ids.append(self.bulk.post_batch(self.job_id, iter(csv_batch))) + def select_records(self, records): + """Executes a SOQL query to select records and adds them to results""" + + self.select_results = [] # Store selected records + + # Count total number of records to fetch + total_num_records = sum(1 for _ in records) + + # Process in batches based on batch_size from 
api_options + for offset in range( + 0, total_num_records, self.api_options.get("batch_size", 500) + ): + # Calculate number of records to fetch in this batch + num_records = min( + self.api_options.get("batch_size", 500), total_num_records - offset + ) + + # Generate and execute SOQL query + query, query_fields = self.select_generate_query(self.sobject, num_records) + self.batch_id = self.bulk.query(self.job_id, query) + self._wait_for_job(self.job_id) + + # Get and process query results + result_ids = self.bulk.get_query_batch_result_ids( + self.batch_id, job_id=self.job_id + ) + query_records = [] + for result_id in result_ids: + uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}" + with download_file(uri, self.bulk) as f: + reader = csv.reader(f) + self.headers = next(reader) + if "Records not found for this query" in self.headers: + break # Stop if no records found + for row in reader: + query_records.append([row[: len(query_fields)]]) + + # Post-process the query results + selected_records, error_message = self.select_post_process( + query_records, num_records, self.sobject + ) + if error_message: + break # Stop if there's an error during post-processing + + self.select_results.extend(selected_records) + + # Update job result based on selection outcome + self.job_result = DataOperationJobResult( + DataOperationStatus.SUCCESS + if len(self.select_results) + else DataOperationStatus.JOB_FAILURE, + [error_message] if error_message else [], + len(self.select_results), + 0, + ) + def _batch(self, records, n, char_limit=10000000): """Given an iterator of records, yields batches of records serialized in .csv format. @@ -472,6 +553,29 @@ def _serialize_csv_record(self, record): return serialized def get_results(self): + """ + Retrieves and processes the results of a Bulk API operation. 
+ """ + + if self.operation is DataOperationType.QUERY: + yield from self._get_query_results() + else: + yield from self._get_batch_results() + + def _get_query_results(self): + """Handles results for QUERY (select) operations""" + for row in self.select_results: + success = process_bool_arg(row["success"]) + created = process_bool_arg(row["created"]) + yield DataOperationResult( + row["id"] if success else None, + success, + None, + created, + ) + + def _get_batch_results(self): + """Handles results for other DataOperationTypes (insert, update, etc.)""" for batch_id in self.batch_ids: try: results_url = ( @@ -481,29 +585,42 @@ def get_results(self): # to avoid the server dropping connections with download_file(results_url, self.bulk) as f: self.logger.info(f"Downloaded results for batch {batch_id}") + yield from self._parse_batch_results(f) - reader = csv.reader(f) - next(reader) # skip header - - for row in reader: - success = process_bool_arg(row[1]) - created = process_bool_arg(row[2]) - yield DataOperationResult( - row[0] if success else None, - success, - row[3] if not success else None, - created, - ) except Exception as e: raise BulkDataException( f"Failed to download results for batch {batch_id} ({str(e)})" ) + def _parse_batch_results(self, f): + """Parses batch results from the downloaded file""" + reader = csv.reader(f) + next(reader) # Skip header row + + for row in reader: + success = process_bool_arg(row[1]) + created = process_bool_arg(row[2]) + yield DataOperationResult( + row[0] if success else None, + success, + row[3] if not success else None, + created, + ) + class RestApiDmlOperation(BaseDmlOperation): """Operation class for all DML operations run using the REST API.""" - def __init__(self, *, sobject, operation, api_options, context, fields): + def __init__( + self, + *, + sobject, + operation, + api_options, + context, + fields, + selection_strategy=SelectStrategy.RANDOM, + ): super().__init__( sobject=sobject, operation=operation, @@ -525,6 +642,9 @@ def __init__(self, *, sobject, operation, api_options, context, fields): self.api_options["batch_size"] = min( self.api_options["batch_size"], MAX_REST_BATCH_SIZE ) + if selection_strategy is SelectStrategy.RANDOM: + self.select_generate_query = random_generate_query + self.select_post_process = random_post_process def _record_to_json(self, rec): result = dict(zip(self.fields, rec)) @@ -631,6 +751,59 @@ def load_records(self, records): row_errors, ) + def select_records(self, records): + """Executes a SOQL query to select records and adds them to results""" + + def convert(rec, fields): + """Helper function to convert record values to strings, handling None values""" + return [str(rec[f]) if rec[f] is not None else "" for f in fields] + + self.results = [] + # Count the number of records to fetch + total_num_records = sum(1 for _ in records) + + # Process in batches + for offset in range(0, total_num_records, self.api_options.get("batch_size")): + num_records = min( + self.api_options.get("batch_size"), total_num_records - offset + ) + # Generate the SOQL query with and LIMIT + query, query_fields = self.select_generate_query(self.sobject, num_records) + + # Execute the query and extract results + response = self.sf.query(query) + # Extract and convert 'Id' fields from the query results + query_records = list( + convert(rec, query_fields) for rec in response["records"] + ) + # Handle pagination if there are more records within this batch + while not response["done"]: + response = self.sf.query_more( + 
response["nextRecordsUrl"], identifier_is_url=True + ) + query_records.extend( + list(convert(rec, query_fields) for rec in response["records"]) + ) + + # Post-process the query results for this batch + selected_records, error_message = self.select_post_process( + query_records, num_records, self.sobject + ) + if error_message: + break + # Add selected records from this batch to the overall results + self.results.extend(selected_records) + + # Update the job result based on the overall selection outcome + self.job_result = DataOperationJobResult( + DataOperationStatus.SUCCESS + if len(self.results) # Check the overall results length + else DataOperationStatus.JOB_FAILURE, + [error_message] if error_message else [], + len(self.results), + 0, + ) + def get_results(self): """Return a generator of DataOperationResult objects.""" diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py new file mode 100644 index 0000000000..c649871217 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -0,0 +1,63 @@ +from cumulusci.tasks.bulkdata.select_utils import ( + random_generate_query, + random_post_process, +) + + +# Test Cases for random_generate_query +def test_random_generate_query_with_default_record_declaration(): + sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS + num_records = 5 + query, fields = random_generate_query(sobject, num_records) + + assert "WHERE" in query # Ensure WHERE clause is included + assert f"LIMIT {num_records}" in query + assert fields == ["Id"] + + +def test_random_generate_query_without_default_record_declaration(): + sobject = "Contact" # Assuming no declaration for this object + num_records = 3 + query, fields = random_generate_query(sobject, num_records) + + assert "WHERE" not in query # No WHERE clause should be present + assert f"LIMIT {num_records}" in query + assert fields == ["Id"] + + +# Test Cases for random_post_process +def test_random_post_process_with_records(): + records = [["001"], ["002"], ["003"]] + num_records = 3 + sobject = "Contact" + selected_records, error_message = random_post_process(records, num_records, sobject) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + assert all(record["id"] in ["001", "002", "003"] for record in selected_records) + + +def test_random_post_process_with_fewer_records(): + records = [["001"]] + num_records = 3 + sobject = "Opportunity" + selected_records, error_message = random_post_process(records, num_records, sobject) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + # Check if records are repeated to match num_records + assert selected_records.count({"id": "001", "success": True, "created": False}) == 3 + + +def test_random_post_process_with_no_records(): + records = [] + num_records = 2 + sobject = "Lead" + selected_records, error_message = random_post_process(records, num_records, sobject) + + assert selected_records == [] + assert error_message == f"No records found for {sobject} in the target org." 
diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index fc8cea7013..6459edb6d0 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -7,6 +7,7 @@ from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata.load import LoadData +from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import ( BulkApiDmlOperation, BulkApiQueryOperation, @@ -534,6 +535,104 @@ def test_get_prev_record_values(self): ) step.bulk.get_all_results_for_query_batch.assert_called_once_with("BATCH_ID") + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_random_strategy_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.RANDOM, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO( + """Id +003000000000001""" + ) + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id=["003000000000001"], success=True, error=None, created=False + ) + ) + == 3 + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_random_strategy_failure__no_records(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.RANDOM, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content indicating no records found + download_mock.return_value = io.StringIO("""Records not found for this query""") + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert 
job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." + ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 + def test_batch(self): context = mock.Mock() @@ -879,6 +978,216 @@ def test_get_prev_record_values(self): ) assert set(relevant_fields) == set(expected_relevant_fields) + @responses.activate + def test_select_records_random_strategy_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + ) + + results = { + "records": [ + {"Id": "003000000000001"}, + ], + "done": True, + } + step.sf.query = mock.Mock() + step.sf.query.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 + ) + + @responses.activate + def test_select_records_random_strategy_success__pagination(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + ) + + results = { + "records": [ + {"Id": "003000000000001"}, + ], + "done": False, + "nextRecordsUrl": "https://example.com", + } + results_more = { + "records": [ + {"Id": "003000000000002"}, + {"Id": "003000000000003"}, + ], + "done": True, + } + step.sf.query = mock.Mock() + step.sf.query.return_value = results + step.sf.query_more = mock.Mock() + step.sf.query_more.return_value = results_more + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their 
properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=False + ) + ) + == 1 + ) + + @responses.activate + def test_select_records_random_strategy_failure__no_records(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + ) + + results = {"records": [], "done": True} + step.sf.query = mock.Mock() + step.sf.query.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." 
+ ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 + @responses.activate def test_insert_dml_operation__boolean_conversion(self): mock_describe_calls() diff --git a/cumulusci/tasks/bulkdata/tests/utils.py b/cumulusci/tasks/bulkdata/tests/utils.py index 173f4c6122..c0db0f9515 100644 --- a/cumulusci/tasks/bulkdata/tests/utils.py +++ b/cumulusci/tasks/bulkdata/tests/utils.py @@ -98,6 +98,9 @@ def get_prev_record_values(self, records): def load_records(self, records): self.records.extend(records) + def select_records(self, records): + pass + def get_results(self): return iter(self.results) From b11abc30898d0d68167cf22a7f0981d6f31de436 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 20 Aug 2024 14:12:30 +0530 Subject: [PATCH 07/65] Refactor select utility file and generalize arguments --- cumulusci/tasks/bulkdata/select_utils.py | 14 ++++++++++---- cumulusci/tasks/bulkdata/step.py | 17 +++++++++++------ .../tasks/bulkdata/tests/test_select_utils.py | 12 +++++++++--- 3 files changed, 30 insertions(+), 13 deletions(-) diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index 3521fa3c8e..48bac23578 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -1,3 +1,5 @@ +import typing as T + from cumulusci.core.enums import StrEnum from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( DEFAULT_DECLARATIONS, @@ -10,7 +12,9 @@ class SelectStrategy(StrEnum): RANDOM = "random" -def random_generate_query(sobject: str, num_records: float): +def random_generate_query( + sobject: str, num_records: float +) -> T.Tuple[str, T.List[str]]: """Generates the SOQL query for the random selection strategy""" # Get the WHERE clause from DEFAULT_DECLARATIONS if available declaration = DEFAULT_DECLARATIONS.get(sobject) @@ -27,16 +31,18 @@ def random_generate_query(sobject: str, num_records: float): return query, ["Id"] -def random_post_process(records, num_records: float, sobject: str): +def random_post_process( + load_records, query_records: list, num_records: float, sobject: str +) -> T.Tuple[T.List[dict], T.Union[str, None]]: """Processes the query results for the random selection strategy""" # Handle case where query returns 0 records - if not records: + if not query_records: error_message = f"No records found for {sobject} in the target org." 
return [], error_message # Add 'success: True' to each record to emulate records have been inserted selected_records = [ - {"id": record[0], "success": True, "created": False} for record in records + {"id": record[0], "success": True, "created": False} for record in query_records ] # If fewer records than requested, repeat existing records to match num_records diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 1844c4caeb..1fe0cc80d4 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -7,6 +7,7 @@ import time from abc import ABCMeta, abstractmethod from contextlib import contextmanager +from itertools import tee from typing import Any, Dict, List, NamedTuple, Optional import requests @@ -454,8 +455,10 @@ def select_records(self, records): self.select_results = [] # Store selected records - # Count total number of records to fetch - total_num_records = sum(1 for _ in records) + # Create a copy of the generator using tee + records, records_copy = tee(records) + # Count total number of records to fetch using the copy + total_num_records = sum(1 for _ in records_copy) # Process in batches based on batch_size from api_options for offset in range( @@ -488,7 +491,7 @@ def select_records(self, records): # Post-process the query results selected_records, error_message = self.select_post_process( - query_records, num_records, self.sobject + records, query_records, num_records, self.sobject ) if error_message: break # Stop if there's an error during post-processing @@ -759,8 +762,10 @@ def convert(rec, fields): return [str(rec[f]) if rec[f] is not None else "" for f in fields] self.results = [] - # Count the number of records to fetch - total_num_records = sum(1 for _ in records) + # Create a copy of the generator using tee + records, records_copy = tee(records) + # Count total number of records to fetch using the copy + total_num_records = sum(1 for _ in records_copy) # Process in batches for offset in range(0, total_num_records, self.api_options.get("batch_size")): @@ -787,7 +792,7 @@ def convert(rec, fields): # Post-process the query results for this batch selected_records, error_message = self.select_post_process( - query_records, num_records, self.sobject + records, query_records, num_records, self.sobject ) if error_message: break diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index c649871217..43c39d63bd 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -30,7 +30,9 @@ def test_random_post_process_with_records(): records = [["001"], ["002"], ["003"]] num_records = 3 sobject = "Contact" - selected_records, error_message = random_post_process(records, num_records, sobject) + selected_records, error_message = random_post_process( + None, records, num_records, sobject + ) assert error_message is None assert len(selected_records) == num_records @@ -43,7 +45,9 @@ def test_random_post_process_with_fewer_records(): records = [["001"]] num_records = 3 sobject = "Opportunity" - selected_records, error_message = random_post_process(records, num_records, sobject) + selected_records, error_message = random_post_process( + None, records, num_records, sobject + ) assert error_message is None assert len(selected_records) == num_records @@ -57,7 +61,9 @@ def test_random_post_process_with_no_records(): records = [] num_records = 2 sobject = "Lead" - selected_records, error_message = 
random_post_process(records, num_records, sobject)
+    selected_records, error_message = random_post_process(
+        None, records, num_records, sobject
+    )

     assert selected_records == []
     assert error_message == f"No records found for {sobject} in the target org."

From a8b1aedd12135f860720fbb19529cf0b251bdf9f Mon Sep 17 00:00:00 2001
From: aditya-balachander
Date: Tue, 20 Aug 2024 16:21:19 +0530
Subject: [PATCH 08/65] Add fields argument to select query utility function

---
 cumulusci/tasks/bulkdata/select_utils.py      |  2 +-
 cumulusci/tasks/bulkdata/step.py              | 14 +++++++++-----
 .../tasks/bulkdata/tests/test_select_utils.py |  4 ++--
 cumulusci/tasks/bulkdata/tests/test_step.py   |  2 +-
 4 files changed, 13 insertions(+), 9 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py
index 48bac23578..808d2f8a2a 100644
--- a/cumulusci/tasks/bulkdata/select_utils.py
+++ b/cumulusci/tasks/bulkdata/select_utils.py
@@ -13,7 +13,7 @@ class SelectStrategy(StrEnum):


 def random_generate_query(
-    sobject: str, num_records: float
+    sobject: str, fields: T.List[str], num_records: float
 ) -> T.Tuple[str, T.List[str]]:
     """Generates the SOQL query for the random selection strategy"""
     # Get the WHERE clause from DEFAULT_DECLARATIONS if available
diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py
index 1fe0cc80d4..14f37db181 100644
--- a/cumulusci/tasks/bulkdata/step.py
+++ b/cumulusci/tasks/bulkdata/step.py
@@ -470,7 +470,9 @@ def select_records(self, records):
             )

             # Generate and execute SOQL query
-            query, query_fields = self.select_generate_query(self.sobject, num_records)
+            query, query_fields = self.select_generate_query(
+                self.sobject, self.fields, num_records
+            )
             self.batch_id = self.bulk.query(self.job_id, query)
             self._wait_for_job(self.job_id)

@@ -487,7 +489,7 @@ def select_records(self, records):
                 if "Records not found for this query" in self.headers:
                     break  # Stop if no records found
                 for row in reader:
-                    query_records.append([row[: len(query_fields)]])
+                    query_records.append(row[: len(query_fields)])

         # Post-process the query results
         selected_records, error_message = self.select_post_process(
@@ -571,9 +573,9 @@ def _get_query_results(self):
             success = process_bool_arg(row["success"])
             created = process_bool_arg(row["created"])
             yield DataOperationResult(
-                row["id"] if success else None,
+                row["id"] if success else "",
                 success,
-                None,
+                "",
                 created,
             )

@@ -773,7 +775,9 @@ def convert(rec, fields):
                 self.api_options.get("batch_size"), total_num_records - offset
             )
             # Generate the SOQL query with a LIMIT clause
-            query, query_fields = self.select_generate_query(self.sobject, num_records)
+            query, query_fields = self.select_generate_query(
+                self.sobject, self.fields, num_records
+            )

             # Execute the query and extract results
             response = self.sf.query(query)
diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py
index 43c39d63bd..29abc845e7 100644
--- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py
+++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py
@@ -8,7 +8,7 @@ def 
test_random_generate_query_with_default_record_declaration(): def test_random_generate_query_without_default_record_declaration(): sobject = "Contact" # Assuming no declaration for this object num_records = 3 - query, fields = random_generate_query(sobject, num_records) + query, fields = random_generate_query(sobject, [], num_records) assert "WHERE" not in query # No WHERE clause should be present assert f"LIMIT {num_records}" in query diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index 6459edb6d0..8f6f34ad90 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -581,7 +581,7 @@ def test_select_records_random_strategy_success(self, download_mock): assert ( results.count( DataOperationResult( - id=["003000000000001"], success=True, error=None, created=False + id="003000000000001", success=True, error="", created=False ) ) == 3 From 720d484018861626ff0249301464c7d3dbfb516b Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 20 Aug 2024 16:52:20 +0530 Subject: [PATCH 09/65] Move DataApi import to utils --- .../bulkdata/extract_dataset_utils/extract_yml.py | 9 +-------- cumulusci/tasks/bulkdata/step.py | 10 +--------- cumulusci/tasks/bulkdata/utils.py | 9 +++++++++ 3 files changed, 11 insertions(+), 17 deletions(-) diff --git a/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py b/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py index 9679da5a1a..cec42d0bd9 100644 --- a/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py +++ b/cumulusci/tasks/bulkdata/extract_dataset_utils/extract_yml.py @@ -5,6 +5,7 @@ from pydantic import Field, validator from cumulusci.core.enums import StrEnum +from cumulusci.tasks.bulkdata.utils import DataApi from cumulusci.utils.yaml.model_parser import CCIDictModel, HashableBaseModel object_decl = re.compile(r"objects\((\w+)\)", re.IGNORECASE) @@ -24,14 +25,6 @@ class SFFieldGroupTypes(StrEnum): required = "required" -class DataApi(StrEnum): - """Enum defining requested Salesforce data API for an operation.""" - - BULK = "bulk" - REST = "rest" - SMART = "smart" - - class ExtractDeclaration(HashableBaseModel): where: T.Optional[str] = None fields_: T.Union[T.List[str], str] = Field(["FIELDS(ALL)"], alias="fields") diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 14f37db181..4770cab37e 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -21,7 +21,7 @@ random_generate_query, random_post_process, ) -from cumulusci.tasks.bulkdata.utils import iterate_in_chunks +from cumulusci.tasks.bulkdata.utils import DataApi, iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict from cumulusci.utils.xml import lxml_parse_string @@ -45,14 +45,6 @@ class DataOperationType(StrEnum): SELECT = "select" -class DataApi(StrEnum): - """Enum defining requested Salesforce data API for an operation.""" - - BULK = "bulk" - REST = "rest" - SMART = "smart" - - class DataOperationStatus(StrEnum): """Enum defining outcome values for a data operation.""" diff --git a/cumulusci/tasks/bulkdata/utils.py b/cumulusci/tasks/bulkdata/utils.py index 082277fb16..b5c195a817 100644 --- a/cumulusci/tasks/bulkdata/utils.py +++ b/cumulusci/tasks/bulkdata/utils.py @@ -10,10 +10,19 @@ from sqlalchemy.engine.base import Connection from sqlalchemy.orm import Session, mapper +from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException 
from cumulusci.utils.iterators import iterate_in_chunks +class DataApi(StrEnum): + """Enum defining requested Salesforce data API for an operation.""" + + BULK = "bulk" + REST = "rest" + SMART = "smart" + + class SqlAlchemyMixin: logger: logging.Logger metadata: MetaData From 2ade0c0d9a36e96e262ad8c6584d76f5b4801e71 Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Fri, 23 Aug 2024 15:11:49 +0530 Subject: [PATCH 10/65] Add similarity select algorithm --- cumulusci/tasks/bulkdata/select_utils.py | 109 +++++++++++++++++++++++ 1 file changed, 109 insertions(+) diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index 808d2f8a2a..4c40600b53 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -10,6 +10,7 @@ class SelectStrategy(StrEnum): """Enum defining the different selection strategies requested.""" RANDOM = "random" + SIMILARITY = "similarity" def random_generate_query( @@ -53,3 +54,111 @@ def random_post_process( selected_records = selected_records[:num_records] return selected_records, None # Return selected records and None for error + + +def similarity_generate_query( + sobject: str, + fields: T.List[str], + num_records: float, +) -> T.Tuple[str, T.List[str]]: + """Generates the SOQL query for the random selection strategy""" + # Get the WHERE clause from DEFAULT_DECLARATIONS if available + declaration = DEFAULT_DECLARATIONS.get(sobject) + if declaration: + where_clause = declaration.where + else: + where_clause = None + # Construct the query with the WHERE clause (if it exists) + + fields.insert(0, "Id") + fields_to_query = ", ".join(field for field in fields if field) + + query = f"SELECT {fields_to_query} FROM {sobject}" + if where_clause: + query += f" WHERE {where_clause}" + + return query, fields + + +def similarity_post_process( + load_records: list, query_records: list, num_records: float, sobject: str +) -> T.Tuple[T.List[dict], T.Union[str, None]]: + """Processes the query results for the similarity selection strategy""" + # Handle case where query returns 0 records + if not query_records: + error_message = f"No records found for {sobject} in the target org." 
+ return [], error_message + + closest_records = [] + + for record in load_records: + closest_record = find_closest_record(record, query_records) + closest_records.append( + {"id": closest_record[0], "success": True, "created": False} + ) + + return closest_records, None + + +def find_closest_record(load_record: list, query_records: list): + closest_distance = float("inf") + closest_record = query_records[0] + + for record in query_records: + distance = calculate_levenshtein_distance(load_record, record[1:]) + if distance < closest_distance: + closest_distance = distance + closest_record = record + + return closest_record + + +def levenshtein_distance(str1: str, str2: str): + """Calculate the Levenshtein distance between two strings""" + len_str1 = len(str1) + 1 + len_str2 = len(str2) + 1 + + dp = [[0 for _ in range(len_str2)] for _ in range(len_str1)] + + for i in range(len_str1): + dp[i][0] = i + for j in range(len_str2): + dp[0][j] = j + + for i in range(1, len_str1): + for j in range(1, len_str2): + cost = 0 if str1[i - 1] == str2[j - 1] else 1 + dp[i][j] = min( + dp[i - 1][j] + 1, # Deletion + dp[i][j - 1] + 1, # Insertion + dp[i - 1][j - 1] + cost, + ) # Substitution + + return dp[-1][-1] + + +def calculate_levenshtein_distance(record1: list, record2: list): + if len(record1) != len(record2): + raise ValueError("Records must have the same number of fields.") + + total_distance = 0 + total_fields = 0 + + for field1, field2 in zip(record1, record2): + + field1 = field1.lower() + field2 = field2.lower() + + if len(field1) == 0 and len(field2) == 0: + # If both fields are blank, distance is 0 + distance = 0 + else: + distance = levenshtein_distance(field1, field2) + if len(field1) == 0 or len(field2) == 0: + # If one field is blank, reduce the impact of the distance + distance = distance * 0.05 # Fixed value for blank vs non-blank + + total_distance += distance + total_fields += 1 + + return total_distance / total_fields if total_fields > 0 else 0 From 5f6da64d94953b25f14ab29af6a9439dbddb0566 Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Fri, 23 Aug 2024 15:13:54 +0530 Subject: [PATCH 11/65] Add support for similarity algorithm in step.py --- cumulusci/tasks/bulkdata/step.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 4770cab37e..8d9f39638c 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -20,6 +20,8 @@ SelectStrategy, random_generate_query, random_post_process, + similarity_generate_query, + similarity_post_process, ) from cumulusci.tasks.bulkdata.utils import DataApi, iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict @@ -369,6 +371,9 @@ def __init__( if selection_strategy is SelectStrategy.RANDOM: self.select_generate_query = random_generate_query self.select_post_process = random_post_process + elif selection_strategy is SelectStrategy.SIMILARITY: + self.select_generate_query = similarity_generate_query + self.select_post_process = similarity_post_process def start(self): self.job_id = self.bulk.create_job( @@ -616,7 +621,7 @@ def __init__( api_options, context, fields, - selection_strategy=SelectStrategy.RANDOM, + selection_strategy=SelectStrategy.SIMILARITY, ): super().__init__( sobject=sobject, @@ -642,6 +647,9 @@ def __init__( if selection_strategy is SelectStrategy.RANDOM: self.select_generate_query = random_generate_query self.select_post_process = random_post_process + elif selection_strategy is 
SelectStrategy.SIMILARITY: + self.select_generate_query = similarity_generate_query + self.select_post_process = similarity_post_process def _record_to_json(self, rec): result = dict(zip(self.fields, rec)) From 30246d99f54d71bf36527440efa9071540a3a07e Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Fri, 23 Aug 2024 15:46:47 +0530 Subject: [PATCH 12/65] Add unit tests for changes made for similarity selection strategy --- .../tasks/bulkdata/tests/test_select_utils.py | 161 ++++++++ cumulusci/tasks/bulkdata/tests/test_step.py | 367 ++++++++++++++++++ 2 files changed, 528 insertions(+) diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index 29abc845e7..4d084d5391 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -1,6 +1,11 @@ from cumulusci.tasks.bulkdata.select_utils import ( + calculate_levenshtein_distance, + find_closest_record, + levenshtein_distance, random_generate_query, random_post_process, + similarity_generate_query, + similarity_post_process, ) @@ -67,3 +72,159 @@ def test_random_post_process_with_no_records(): assert selected_records == [] assert error_message == f"No records found for {sobject} in the target org." + + +# Test Cases for random_generate_query +def test_similarity_generate_query_with_default_record_declaration(): + sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS + num_records = 5 + query, fields = similarity_generate_query(sobject, ["Name"], num_records) + + assert "WHERE" in query # Ensure WHERE clause is included + assert fields == ["Id", "Name"] + + +def test_similarity_generate_query_without_default_record_declaration(): + sobject = "Contact" # Assuming no declaration for this object + num_records = 3 + query, fields = similarity_generate_query(sobject, ["Name"], num_records) + + assert "WHERE" not in query # No WHERE clause should be present + assert fields == ["Id", "Name"] + + +def test_levenshtein_distance(): + assert levenshtein_distance("kitten", "kitten") == 0 # Identical strings + assert levenshtein_distance("kitten", "sitten") == 1 # One substitution + assert levenshtein_distance("kitten", "kitte") == 1 # One deletion + assert levenshtein_distance("kitten", "sittin") == 2 # Two substitutions + assert levenshtein_distance("kitten", "dog") == 6 # Completely different strings + assert levenshtein_distance("kitten", "") == 6 # One string is empty + assert levenshtein_distance("", "") == 0 # Both strings are empty + assert levenshtein_distance("Kitten", "kitten") == 1 # Case sensitivity + assert levenshtein_distance("kit ten", "kitten") == 1 # Strings with spaces + assert ( + levenshtein_distance("levenshtein", "meilenstein") == 4 + ) # Longer strings with multiple differences + + +def test_calculate_levenshtein_distance(): + # Identical records + record1 = ["Tom Cruise", "24", "Actor"] + record2 = ["Tom Cruise", "24", "Actor"] + assert calculate_levenshtein_distance(record1, record2) == 0 # Distance should be 0 + + # Records with one different field + record1 = ["Tom Cruise", "24", "Actor"] + record2 = ["Tom Hanks", "24", "Actor"] + assert calculate_levenshtein_distance(record1, record2) > 0 # Non-zero distance + + # One record has an empty field + record1 = ["Tom Cruise", "24", "Actor"] + record2 = ["Tom Cruise", "", "Actor"] + assert ( + calculate_levenshtein_distance(record1, record2) > 0 + ) # Distance should reflect the empty field + + # Completely empty records + record1 = ["", 
"", ""] + record2 = ["", "", ""] + assert calculate_levenshtein_distance(record1, record2) == 0 # Distance should be 0 + + +def test_find_closest_record(): + # Test case 1: Exact match + load_record = ["Tom Cruise", "62", "Actor"] + query_records = [ + [1, "Tom Hanks", "30", "Actor"], + [2, "Tom Cruise", "62", "Actor"], # Exact match + [3, "Jennifer Aniston", "30", "Actress"], + ] + assert find_closest_record(load_record, query_records) == [ + 2, + "Tom Cruise", + "62", + "Actor", + ] # Should return the exact match + + # Test case 2: Closest match with slight differences + load_record = ["Tom Cruise", "62", "Actor"] + query_records = [ + [1, "Tom Hanks", "62", "Actor"], + [2, "Tom Cruise", "63", "Actor"], # Slight difference + [3, "Jennifer Aniston", "30", "Actress"], + ] + assert find_closest_record(load_record, query_records) == [ + 2, + "Tom Cruise", + "63", + "Actor", + ] # Should return the closest match + + # Test case 3: All records are significantly different + load_record = ["Tom Cruise", "62", "Actor"] + query_records = [ + [1, "Brad Pitt", "30", "Producer"], + [2, "Leonardo DiCaprio", "40", "Director"], + [3, "Jennifer Aniston", "30", "Actress"], + ] + assert ( + find_closest_record(load_record, query_records) == query_records[0] + ) # Should return the first record as the closest (though none are close) + + # Test case 4: Closest match is the last in the list + load_record = ["Tom Cruise", "62", "Actor"] + query_records = [ + [1, "Johnny Depp", "50", "Actor"], + [2, "Brad Pitt", "30", "Producer"], + [3, "Tom Cruise", "62", "Actor"], # Exact match as the last record + ] + assert find_closest_record(load_record, query_records) == [ + 3, + "Tom Cruise", + "62", + "Actor", + ] # Should return the last record + + # Test case 5: Single record in query_records + load_record = ["Tom Cruise", "62", "Actor"] + query_records = [[1, "Johnny Depp", "50", "Actor"]] + assert find_closest_record(load_record, query_records) == [ + 1, + "Johnny Depp", + "50", + "Actor", + ] # Should return the only record available + + +def test_similarity_post_process_with_records(): + num_records = 1 + sobject = "Contact" + load_records = [["Tom Cruise", "62", "Actor"]] + query_records = [ + ["001", "Tom Hanks", "62", "Actor"], + ["002", "Tom Cruise", "63", "Actor"], # Slight difference + ["003", "Jennifer Aniston", "30", "Actress"], + ] + + selected_records, error_message = similarity_post_process( + load_records, query_records, num_records, sobject + ) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + assert all(record["id"] in ["002"] for record in selected_records) + + +def test_similarity_post_process_with_no_records(): + records = [] + num_records = 2 + sobject = "Lead" + selected_records, error_message = similarity_post_process( + None, records, num_records, sobject + ) + + assert selected_records == [] + assert error_message == f"No records found for {sobject} in the target org." 
diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index 8f6f34ad90..66f464006f 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -633,6 +633,137 @@ def test_select_records_random_strategy_failure__no_records(self, download_mock) assert job_result.records_processed == 0 assert job_result.total_row_errors == 0 + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Id", "Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO( + """Id,Name,Email +003000000000001,Jawad,mjawadtp@example.com +003000000000002,Aditya,aditya@example.com +003000000000003,Tom,tom@example.com""" + ) + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=False + ) + ) + == 1 + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_failure__no_records( + self, download_mock + ): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Id", "Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content indicating no records found + download_mock.return_value = io.StringIO("""Records not found for this query""") + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input 
records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE + assert ( + job_result.job_errors[0] + == "No records found for Contact in the target org." + ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 + def test_batch(self): context = mock.Mock() @@ -1014,6 +1145,7 @@ def test_select_records_random_strategy_success(self): api_options={"batch_size": 10, "update_key": "LastName"}, context=task, fields=["LastName"], + selection_strategy=SelectStrategy.RANDOM, ) results = { @@ -1078,6 +1210,7 @@ def test_select_records_random_strategy_success__pagination(self): api_options={"batch_size": 10, "update_key": "LastName"}, context=task, fields=["LastName"], + selection_strategy=SelectStrategy.RANDOM, ) results = { @@ -1168,6 +1301,7 @@ def test_select_records_random_strategy_failure__no_records(self): api_options={"batch_size": 10, "update_key": "LastName"}, context=task, fields=["LastName"], + selection_strategy=SelectStrategy.RANDOM, ) results = {"records": [], "done": True} @@ -1188,6 +1322,239 @@ def test_select_records_random_strategy_failure__no_records(self): assert job_result.records_processed == 0 assert job_result.total_row_errors == 0 + @responses.activate + def test_select_records_similarity_strategy_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["Id", "Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + results = { + "records": [ + { + "Id": "003000000000001", + "Name": "Jawad", + "Email": "mjawadtp@example.com", + }, + { + "Id": "003000000000002", + "Name": "Aditya", + "Email": "aditya@example.com", + }, + { + "Id": "003000000000003", + "Name": "Tom Cruise", + "Email": "tomcruise@example.com", + }, + ], + "done": True, + } + step.sf.query = mock.Mock() + step.sf.query.return_value = results + records = iter( + [ + ["Id: 1", "Jawad", "mjawadtp@example.com"], + ["Id: 2", "Aditya", "aditya@example.com"], + ["Id: 2", "Tom", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", 
created=False + ) + ) + == 1 + ) + + @responses.activate + def test_select_records_random_similarity_success__pagination(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["Id", "Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + results = { + "records": [ + { + "Id": "003000000000001", + "Name": "Jawad", + "Email": "mjawadtp@example.com", + }, + ], + "done": False, + "nextRecordsUrl": "https://example.com", + } + results_more = { + "records": [ + { + "Id": "003000000000002", + "Name": "Aditya", + "Email": "aditya@example.com", + }, + { + "Id": "003000000000003", + "Name": "Tom Cruise", + "Email": "tomcruise@example.com", + }, + ], + "done": True, + } + step.sf.query = mock.Mock() + step.sf.query.return_value = results + step.sf.query_more = mock.Mock() + step.sf.query_more.return_value = results_more + records = iter( + [ + ["Id: 1", "Jawad", "mjawadtp@example.com"], + ["Id: 2", "Aditya", "aditya@example.com"], + ["Id: 2", "Tom", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + + @responses.activate + def test_select_records_similarity_strategy_failure__no_records(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + results = {"records": [], "done": True} + step.sf.query = mock.Mock() + step.sf.query.return_value = results + records = iter( + [ + ["Id: 1", "Jawad", "mjawadtp@example.com"], + ["Id: 2", "Aditya", "aditya@example.com"], + ["Id: 2", "Tom", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the job result and assert its properties 
for failure scenario
+        job_result = step.job_result
+        assert job_result.status == DataOperationStatus.JOB_FAILURE
+        assert (
+            job_result.job_errors[0]
+            == "No records found for Contact in the target org."
+        )
+        assert job_result.records_processed == 0
+        assert job_result.total_row_errors == 0
+
     @responses.activate
     def test_insert_dml_operation__boolean_conversion(self):
         mock_describe_calls()

From a368803e5f4b85dcf59d08f3265de4433706f1ad Mon Sep 17 00:00:00 2001
From: Jawadtp
Date: Tue, 27 Aug 2024 15:34:56 +0530
Subject: [PATCH 13/65] Add more assertions in tests and remove list typing for load_records

---
 cumulusci/tasks/bulkdata/select_utils.py    |  2 +-
 cumulusci/tasks/bulkdata/tests/test_step.py | 44 ++++++++++++++++++++-
 2 files changed, 43 insertions(+), 3 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py
index 4c40600b53..f40ae8d431 100644
--- a/cumulusci/tasks/bulkdata/select_utils.py
+++ b/cumulusci/tasks/bulkdata/select_utils.py
@@ -81,7 +81,7 @@ def similarity_generate_query(


 def similarity_post_process(
-    load_records: list, query_records: list, num_records: float, sobject: str
+    load_records, query_records: list, num_records: float, sobject: str
 ) -> T.Tuple[T.List[dict], T.Union[str, None]]:
     """Processes the query results for the similarity selection strategy"""
     # Handle case where query returns 0 records
diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py
index 66f464006f..9fdee3adb0 100644
--- a/cumulusci/tasks/bulkdata/tests/test_step.py
+++ b/cumulusci/tasks/bulkdata/tests/test_step.py
@@ -1387,7 +1387,7 @@ def test_select_records_similarity_strategy_success(self):
             [
                 ["Id: 1", "Jawad", "mjawadtp@example.com"],
                 ["Id: 2", "Aditya", "aditya@example.com"],
-                ["Id: 2", "Tom", "tom@example.com"],
+                ["Id: 3", "Tom Cruise", "tom@example.com"],
             ]
         )
         step.start()
@@ -1406,6 +1406,22 @@ def test_select_records_similarity_strategy_success(self):
             )
             == 1
         )
+        assert (
+            results.count(
+                DataOperationResult(
+                    id="003000000000002", success=True, error="", created=False
+                )
+            )
+            == 1
+        )
+        assert (
+            results.count(
+                DataOperationResult(
+                    id="003000000000003", success=True, error="", created=False
+                )
+            )
+            == 1
+        )

     @responses.activate
     def test_select_records_random_similarity_success__pagination(self):
@@ -1480,7 +1496,7 @@ def test_select_records_random_similarity_success__pagination(self):
             [
                 ["Id: 1", "Jawad", "mjawadtp@example.com"],
                 ["Id: 2", "Aditya", "aditya@example.com"],
-                ["Id: 2", "Tom", "tom@example.com"],
+                ["Id: 3", "Tom Cruise", "tom@example.com"],
             ]
         )
         step.start()
@@ -1491,6 +1507,30 @@ def test_select_records_random_similarity_success__pagination(self):

         # Get the results and assert their properties
         results = list(step.get_results())
         assert len(results) == 3  # Expect 3 results (matching the input records count)
         # Assert that all results have the expected ID, success, and created values
+        assert (
+            results.count(
+                DataOperationResult(
+                    id="003000000000001", success=True, error="", created=False
+                )
+            )
+            == 1
+        )
+        assert (
+            results.count(
+                DataOperationResult(
+                    id="003000000000002", success=True, error="", created=False
+                )
+            )
+            == 1
+        )
+        assert (
+            results.count(
+                DataOperationResult(
+                    id="003000000000003", success=True, error="", created=False
+                )
+            )
+            == 1
+        )

From 6fad397839d8b7c6fe4312d8693614004dc6d5a2 Mon Sep 17 00:00:00 2001
From: aditya-balachander
Date: Tue, 3 Sep 2024 03:11:23 +0530
Subject: [PATCH 
14/65] Adds selection_filter and RANDOM selection strategy --- cumulusci/tasks/bulkdata/load.py | 20 +- cumulusci/tasks/bulkdata/mapping_parser.py | 9 +- cumulusci/tasks/bulkdata/select_utils.py | 78 ++- cumulusci/tasks/bulkdata/step.py | 281 ++++++++--- .../tasks/bulkdata/tests/test_select_utils.py | 107 +++- cumulusci/tasks/bulkdata/tests/test_step.py | 455 ++++++++++++------ 6 files changed, 715 insertions(+), 235 deletions(-) diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index 4ae0dcf31a..9435dfd183 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -289,7 +289,12 @@ def _execute_step( self, step, self._stream_queried_data(mapping, local_ids, query) ) step.start() - step.load_records(self._stream_queried_data(mapping, local_ids, query)) + if mapping.action == DataOperationType.SELECT: + step.select_records( + self._stream_queried_data(mapping, local_ids, query) + ) + else: + step.load_records(self._stream_queried_data(mapping, local_ids, query)) step.end() # Process Job Results @@ -336,6 +341,8 @@ def configure_step(self, mapping): self.check_simple_upsert(mapping) api_options["update_key"] = mapping.update_key[0] action = DataOperationType.UPSERT + elif mapping.action == DataOperationType.SELECT: + action = DataOperationType.QUERY else: action = mapping.action @@ -349,6 +356,8 @@ def configure_step(self, mapping): fields=fields, api=mapping.api, volume=query.count(), + selection_strategy=mapping.selection_strategy, + selection_filter=mapping.selection_filter, ) return step, query @@ -481,10 +490,11 @@ def _process_job_results(self, mapping, step, local_ids): """Get the job results and process the results. If we're raising for row-level errors, do so; if we're inserting, store the new Ids.""" - is_insert_or_upsert = mapping.action in ( + is_insert_upsert_or_select = mapping.action in ( DataOperationType.INSERT, DataOperationType.UPSERT, DataOperationType.ETL_UPSERT, + DataOperationType.SELECT, ) conn = self.session.connection() @@ -500,7 +510,7 @@ def _process_job_results(self, mapping, step, local_ids): break # If we know we have no successful inserts, don't attempt to persist Ids. # Do, however, drain the generator to get error-checking behavior. - if is_insert_or_upsert and ( + if is_insert_upsert_or_select and ( step.job_result.records_processed - step.job_result.total_row_errors ): table = self.metadata.tables[self.ID_TABLE_NAME] @@ -516,7 +526,7 @@ def _process_job_results(self, mapping, step, local_ids): # person account Contact records so lookups to # person account Contact records get populated downstream as expected. 
if ( - is_insert_or_upsert + is_insert_upsert_or_select and mapping.sf_object == "Contact" and self._can_load_person_accounts(mapping) ): @@ -531,7 +541,7 @@ def _process_job_results(self, mapping, step, local_ids): ), ) - if is_insert_or_upsert: + if is_insert_upsert_or_select: self.session.commit() def _generate_results_id_map(self, step, local_ids): diff --git a/cumulusci/tasks/bulkdata/mapping_parser.py b/cumulusci/tasks/bulkdata/mapping_parser.py index bb59fc6647..e812ca7d16 100644 --- a/cumulusci/tasks/bulkdata/mapping_parser.py +++ b/cumulusci/tasks/bulkdata/mapping_parser.py @@ -15,6 +15,7 @@ from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata.dates import iso_to_date +from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import DataApi, DataOperationType from cumulusci.utils import convert_to_snake_case from cumulusci.utils.yaml.model_parser import CCIDictModel @@ -84,7 +85,7 @@ class BulkMode(StrEnum): ENUM_VALUES = { v.value.lower(): v.value - for enum in [BulkMode, DataApi, DataOperationType] + for enum in [BulkMode, DataApi, DataOperationType, SelectStrategy] for v in enum.__members__.values() } @@ -107,9 +108,13 @@ class MappingStep(CCIDictModel): ] = None # default should come from task options anchor_date: Optional[Union[str, date]] = None soql_filter: Optional[str] = None # soql_filter property + selection_strategy: SelectStrategy = SelectStrategy.STANDARD # selection strategy + selection_filter: Optional[ + str + ] = None # filter to be added at the end of select query update_key: T.Union[str, T.Tuple[str, ...]] = () # only for upserts - @validator("bulk_mode", "api", "action", pre=True) + @validator("bulk_mode", "api", "action", "selection_strategy", pre=True) def case_normalize(cls, val): if isinstance(val, Enum): return val diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index f40ae8d431..976f852540 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -1,3 +1,4 @@ +import random import typing as T from cumulusci.core.enums import StrEnum @@ -9,14 +10,52 @@ class SelectStrategy(StrEnum): """Enum defining the different selection strategies requested.""" - RANDOM = "random" + STANDARD = "standard" SIMILARITY = "similarity" + RANDOM = "random" -def random_generate_query( - sobject: str, fields: T.List[str], num_records: float +class SelectOperationExecutor: + def __init__(self, strategy: SelectStrategy): + self.strategy = strategy + + def select_generate_query( + self, sobject: str, fields: T.List[str], num_records: int + ): + # For STANDARD strategy + if self.strategy == SelectStrategy.STANDARD: + return standard_generate_query(sobject=sobject, num_records=num_records) + # For SIMILARITY strategy + elif self.strategy == SelectStrategy.SIMILARITY: + return similarity_generate_query(sobject=sobject, fields=fields) + # For RANDOM strategy + elif self.strategy == SelectStrategy.RANDOM: + return standard_generate_query(sobject=sobject, num_records=num_records) + + def select_post_process( + self, load_records, query_records: list, num_records: int, sobject: str + ): + # For STANDARD strategy + if self.strategy == SelectStrategy.STANDARD: + return standard_post_process( + query_records=query_records, num_records=num_records, sobject=sobject + ) + # For SIMILARITY strategy + elif self.strategy == SelectStrategy.SIMILARITY: + return similarity_post_process( + 
load_records=load_records, query_records=query_records, sobject=sobject + ) + # For RANDOM strategy + elif self.strategy == SelectStrategy.RANDOM: + return random_post_process( + query_records=query_records, num_records=num_records, sobject=sobject + ) + + +def standard_generate_query( + sobject: str, num_records: int ) -> T.Tuple[str, T.List[str]]: - """Generates the SOQL query for the random selection strategy""" + """Generates the SOQL query for the standard (as well as random) selection strategy""" # Get the WHERE clause from DEFAULT_DECLARATIONS if available declaration = DEFAULT_DECLARATIONS.get(sobject) if declaration: @@ -32,10 +71,10 @@ def random_generate_query( return query, ["Id"] -def random_post_process( - load_records, query_records: list, num_records: float, sobject: str +def standard_post_process( + query_records: list, num_records: int, sobject: str ) -> T.Tuple[T.List[dict], T.Union[str, None]]: - """Processes the query results for the random selection strategy""" + """Processes the query results for the standard selection strategy""" # Handle case where query returns 0 records if not query_records: error_message = f"No records found for {sobject} in the target org." @@ -59,9 +98,8 @@ def random_post_process( def similarity_generate_query( sobject: str, fields: T.List[str], - num_records: float, ) -> T.Tuple[str, T.List[str]]: - """Generates the SOQL query for the random selection strategy""" + """Generates the SOQL query for the similarity selection strategy""" # Get the WHERE clause from DEFAULT_DECLARATIONS if available declaration = DEFAULT_DECLARATIONS.get(sobject) if declaration: @@ -81,7 +119,7 @@ def similarity_generate_query( def similarity_post_process( - load_records, query_records: list, num_records: float, sobject: str + load_records: list, query_records: list, sobject: str ) -> T.Tuple[T.List[dict], T.Union[str, None]]: """Processes the query results for the similarity selection strategy""" # Handle case where query returns 0 records @@ -100,6 +138,26 @@ def similarity_post_process( return closest_records, None +def random_post_process( + query_records: list, num_records: int, sobject: str +) -> T.Tuple[T.List[dict], T.Union[str, None]]: + """Processes the query results for the random selection strategy""" + + if not query_records: + error_message = f"No records found for {sobject} in the target org." 
+ return [], error_message + + selected_records = [] + for _ in range(num_records): # Loop 'num_records' times + # Randomly select one record from query_records + random_record = random.choice(query_records) + selected_records.append( + {"id": random_record[0], "success": True, "created": False} + ) + + return selected_records, None + + def find_closest_record(load_record: list, query_records: list): closest_distance = float("inf") closest_record = query_records[0] diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 8d9f39638c..fd25f0e19d 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -3,25 +3,23 @@ import json import os import pathlib +import re import tempfile import time from abc import ABCMeta, abstractmethod from contextlib import contextmanager from itertools import tee -from typing import Any, Dict, List, NamedTuple, Optional +from typing import Any, Dict, List, NamedTuple, Optional, Union import requests import salesforce_bulk from cumulusci.core.enums import StrEnum -from cumulusci.core.exceptions import BulkDataException +from cumulusci.core.exceptions import BulkDataException, SOQLQueryException from cumulusci.core.utils import process_bool_arg from cumulusci.tasks.bulkdata.select_utils import ( + SelectOperationExecutor, SelectStrategy, - random_generate_query, - random_post_process, - similarity_generate_query, - similarity_post_process, ) from cumulusci.tasks.bulkdata.utils import DataApi, iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict @@ -352,7 +350,8 @@ def __init__( api_options, context, fields, - selection_strategy=SelectStrategy.RANDOM, + selection_strategy=SelectStrategy.STANDARD, + selection_filter=None, ): super().__init__( sobject=sobject, @@ -368,12 +367,8 @@ def __init__( self.csv_buff = io.StringIO(newline="") self.csv_writer = csv.writer(self.csv_buff, quoting=csv.QUOTE_ALL) - if selection_strategy is SelectStrategy.RANDOM: - self.select_generate_query = random_generate_query - self.select_post_process = random_post_process - elif selection_strategy is SelectStrategy.SIMILARITY: - self.select_generate_query = similarity_generate_query - self.select_post_process = similarity_post_process + self.select_operation_executor = SelectOperationExecutor(selection_strategy) + self.selection_filter = selection_filter def start(self): self.job_id = self.bulk.create_job( @@ -451,7 +446,7 @@ def select_records(self, records): """Executes a SOQL query to select records and adds them to results""" self.select_results = [] # Store selected records - + query_records = [] # Create a copy of the generator using tee records, records_copy = tee(records) # Count total number of records to fetch using the copy @@ -467,34 +462,55 @@ def select_records(self, records): ) # Generate and execute SOQL query - query, query_fields = self.select_generate_query( + ( + select_query, + query_fields, + ) = self.select_operation_executor.select_generate_query( self.sobject, self.fields, num_records ) - self.batch_id = self.bulk.query(self.job_id, query) - self._wait_for_job(self.job_id) + if self.selection_filter: + # Generate user filter query if selection_filter is present (offset clause not supported) + user_query = generate_user_filter_query( + self.selection_filter, self.sobject, ["Id"], num_records, None + ) + # Execute the user query using Bulk API + user_query_executor = get_query_operation( + sobject=self.sobject, + fields=["Id"], + api_options=self.api_options, + context=self, + 
query=user_query, + api=DataApi.BULK, + ) + user_query_executor.query() + user_query_records = user_query_executor.get_results() - # Get and process query results - result_ids = self.bulk.get_query_batch_result_ids( - self.batch_id, job_id=self.job_id - ) - query_records = [] - for result_id in result_ids: - uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}" - with download_file(uri, self.bulk) as f: - reader = csv.reader(f) - self.headers = next(reader) - if "Records not found for this query" in self.headers: - break # Stop if no records found - for row in reader: - query_records.append(row[: len(query_fields)]) - - # Post-process the query results - selected_records, error_message = self.select_post_process( - records, query_records, num_records, self.sobject + # Find intersection based on 'Id' + user_query_ids = set(record[0] for record in user_query_records) + + # Execute the main select query using Bulk API + select_query_records = self._execute_select_query( + select_query=select_query, query_fields=query_fields ) - if error_message: - break # Stop if there's an error during post-processing + # If user_query_ids exist, filter select_query_records based on the intersection of Ids + if self.selection_filter: + query_records.extend( + record + for record in select_query_records + if record[query_fields.index("Id")] in user_query_ids + ) + else: + query_records.extend(select_query_records) + + # Post-process the query results + ( + selected_records, + error_message, + ) = self.select_operation_executor.select_post_process( + records, query_records, num_records, self.sobject + ) + if not error_message: self.select_results.extend(selected_records) # Update job result based on selection outcome @@ -507,6 +523,25 @@ def select_records(self, records): 0, ) + def _execute_select_query(self, select_query: str, query_fields: List[str]): + """Executes the select Bulk API query and retrieves the results.""" + self.batch_id = self.bulk.query(self.job_id, select_query) + self._wait_for_job(self.job_id) + result_ids = self.bulk.get_query_batch_result_ids( + self.batch_id, job_id=self.job_id + ) + select_query_records = [] + for result_id in result_ids: + uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}" + with download_file(uri, self.bulk) as f: + reader = csv.reader(f) + self.headers = next(reader) + if "Records not found for this query" in self.headers: + break + for row in reader: + select_query_records.append(row[: len(query_fields)]) + return select_query_records + def _batch(self, records, n, char_limit=10000000): """Given an iterator of records, yields batches of records serialized in .csv format. 
@@ -622,6 +657,7 @@ def __init__(
         context,
         fields,
         selection_strategy=SelectStrategy.SIMILARITY,
+        selection_filter=None,
     ):
         super().__init__(
             sobject=sobject,
@@ -644,12 +680,9 @@ def __init__(
         self.api_options["batch_size"] = min(
             self.api_options["batch_size"], MAX_REST_BATCH_SIZE
         )
-        if selection_strategy is SelectStrategy.RANDOM:
-            self.select_generate_query = random_generate_query
-            self.select_post_process = random_post_process
-        elif selection_strategy is SelectStrategy.SIMILARITY:
-            self.select_generate_query = similarity_generate_query
-            self.select_post_process = similarity_post_process
+
+        self.select_operation_executor = SelectOperationExecutor(selection_strategy)
+        self.selection_filter = selection_filter
 
     def _record_to_json(self, rec):
         result = dict(zip(self.fields, rec))
@@ -764,6 +797,7 @@ def convert(rec, fields):
             return [str(rec[f]) if rec[f] is not None else "" for f in fields]
 
         self.results = []
+        query_records = []
         # Create a copy of the generator using tee
         records, records_copy = tee(records)
         # Count total number of records to fetch using the copy
@@ -774,32 +808,44 @@ def convert(rec, fields):
             num_records = min(
                 self.api_options.get("batch_size"), total_num_records - offset
             )
-            # Generate the SOQL query with and LIMIT
-            query, query_fields = self.select_generate_query(
+
+            # Generate the SOQL query based on the selection strategy
+            (
+                select_query,
+                query_fields,
+            ) = self.select_operation_executor.select_generate_query(
                 self.sobject, self.fields, num_records
             )
-            # Execute the query and extract results
-            response = self.sf.query(query)
-            # Extract and convert 'Id' fields from the query results
-            query_records = list(
-                convert(rec, query_fields) for rec in response["records"]
-            )
-            # Handle pagination if there are more records within this batch
-            while not response["done"]:
-                response = self.sf.query_more(
-                    response["nextRecordsUrl"], identifier_is_url=True
+            # If a user-given selection filter is present, create a composite request
+            if self.selection_filter:
+                user_query = generate_user_filter_query(
+                    self.selection_filter, self.sobject, ["Id"], num_records, offset
+                )
+                query_records.extend(
+                    self._execute_composite_query(
+                        select_query=select_query,
+                        user_query=user_query,
+                        query_fields=query_fields,
+                    )
+                )
+            else:
+                # Handle the case where self.selection_filter is None (and hence user_query is also None)
+                response = self.sf.restful(
+                    requests.utils.requote_uri(f"query/?q={select_query}"), method="GET"
                 )
                 query_records.extend(
                     list(convert(rec, query_fields) for rec in response["records"])
                 )
 
-            # Post-process the query results for this batch
-            selected_records, error_message = self.select_post_process(
-                records, query_records, num_records, self.sobject
-            )
-            if error_message:
-                break
+            # Post-process the query results for this batch
+            (
+                selected_records,
+                error_message,
+            ) = self.select_operation_executor.select_post_process(
+                records, query_records, total_num_records, self.sobject
+            )
+            if not error_message:
                 # Add selected records from this batch to the overall results
                 self.results.extend(selected_records)
 
@@ -813,6 +859,65 @@ def convert(rec, fields):
             0,
         )
 
+    def _execute_composite_query(self, select_query, user_query, query_fields):
+        """Executes a composite request with two queries and returns the intersected results."""
+
+        def convert(rec, fields):
+            """Helper function to convert record values to strings, handling None values"""
+            return [str(rec[f]) if rec[f] is not None else "" for f in fields]
+
+        composite_request_json = {
+            "compositeRequest": [
+ { + "method": "GET", + "url": requests.utils.requote_uri( + f"/services/data/v{self.sf.sf_version}/query/?q={select_query}" + ), + "referenceId": "select_query", + }, + { + "method": "GET", + "url": requests.utils.requote_uri( + f"/services/data/v{self.sf.sf_version}/query/?q={user_query}" + ), + "referenceId": "user_query", + }, + ] + } + response = self.sf.restful( + "composite", method="POST", json=composite_request_json + ) + + # Extract results based on referenceId + for sub_response in response["compositeResponse"]: + if ( + sub_response["referenceId"] == "select_query" + and sub_response["httpStatusCode"] == 200 + ): + select_query_records = list( + convert(rec, query_fields) + for rec in sub_response["body"]["records"] + ) + elif ( + sub_response["referenceId"] == "user_query" + and sub_response["httpStatusCode"] == 200 + ): + user_query_records = list( + convert(rec, ["Id"]) for rec in sub_response["body"]["records"] + ) + else: + raise SOQLQueryException( + f"{sub_response['body'][0]['errorCode']}: {sub_response['body'][0]['message']}" + ) + # Find intersection based on 'Id' + user_query_ids = set(record[0] for record in user_query_records) + + return [ + record + for record in select_query_records + if record[query_fields.index("Id")] in user_query_ids + ] + def get_results(self): """Return a generator of DataOperationResult objects.""" @@ -894,6 +999,8 @@ def get_dml_operation( context: Any, volume: int, api: Optional[DataApi] = DataApi.SMART, + selection_strategy: SelectStrategy = SelectStrategy.STANDARD, + selection_filter: Union[str, None] = None, ) -> BaseDmlOperation: """Create an appropriate DmlOperation instance for the given parameters, selecting between REST and Bulk APIs based upon volume (Bulk used at volumes over 2000 records, @@ -927,4 +1034,56 @@ def get_dml_operation( api_options=api_options, context=context, fields=fields, + selection_strategy=selection_strategy, + selection_filter=selection_filter, ) + + +def generate_user_filter_query( + filter_clause: str, + sobject: str, + fields: list, + limit_clause: Union[int, None] = None, + offset_clause: Union[int, None] = None, +) -> str: + """ + Generates a SOQL query with the provided filter, object, fields, limit, and offset clauses. + Handles cases where the filter clause already contains LIMIT or OFFSET, and avoids multiple spaces. 
+ """ + + # Extract existing LIMIT and OFFSET from filter_clause if present + existing_limit_match = re.search(r"LIMIT\s+(\d+)", filter_clause, re.IGNORECASE) + existing_offset_match = re.search(r"OFFSET\s+(\d+)", filter_clause, re.IGNORECASE) + + if existing_limit_match: + existing_limit = int(existing_limit_match.group(1)) + if limit_clause is not None: # Only apply limit_clause if it's provided + limit_clause = min(existing_limit, limit_clause) + else: + limit_clause = existing_limit + + if existing_offset_match: + existing_offset = int(existing_offset_match.group(1)) + if offset_clause is not None: + offset_clause = existing_offset + offset_clause + else: + offset_clause = existing_offset + + # Remove existing LIMIT and OFFSET from filter_clause, handling potential extra spaces + filter_clause = re.sub( + r"\s+OFFSET\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE + ).strip() + filter_clause = re.sub( + r"\s+LIMIT\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE + ).strip() + + # Construct the SOQL query + fields_str = ", ".join(fields) + query = f"SELECT {fields_str} FROM {sobject} {filter_clause}" + + if limit_clause is not None: + query += f" LIMIT {limit_clause}" + if offset_clause is not None: + query += f" OFFSET {offset_clause}" + + return query diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index 4d084d5391..0ae97acb46 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -1,19 +1,47 @@ from cumulusci.tasks.bulkdata.select_utils import ( + SelectOperationExecutor, + SelectStrategy, calculate_levenshtein_distance, find_closest_record, levenshtein_distance, - random_generate_query, - random_post_process, - similarity_generate_query, - similarity_post_process, ) -# Test Cases for random_generate_query +# Test Cases for standard_generate_query +def test_standard_generate_query_with_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS + num_records = 5 + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], num_records=num_records + ) + + assert "WHERE" in query # Ensure WHERE clause is included + assert f"LIMIT {num_records}" in query + assert fields == ["Id"] + + +def test_standard_generate_query_without_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + sobject = "Contact" # Assuming no declaration for this object + num_records = 3 + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], num_records=num_records + ) + + assert "WHERE" not in query # No WHERE clause should be present + assert f"LIMIT {num_records}" in query + assert fields == ["Id"] + + +# Test Cases for random generate query def test_random_generate_query_with_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS num_records = 5 - query, fields = random_generate_query(sobject, [], num_records) + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], num_records=num_records + ) assert "WHERE" in query # Ensure WHERE clause is included assert f"LIMIT {num_records}" in query @@ -21,21 +49,25 @@ def test_random_generate_query_with_default_record_declaration(): def 
test_random_generate_query_without_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) sobject = "Contact" # Assuming no declaration for this object num_records = 3 - query, fields = random_generate_query(sobject, [], num_records) + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], num_records=num_records + ) assert "WHERE" not in query # No WHERE clause should be present assert f"LIMIT {num_records}" in query assert fields == ["Id"] -# Test Cases for random_post_process -def test_random_post_process_with_records(): +# Test Cases for standard_post_process +def test_standard_post_process_with_records(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) records = [["001"], ["002"], ["003"]] num_records = 3 sobject = "Contact" - selected_records, error_message = random_post_process( + selected_records, error_message = select_operator.select_post_process( None, records, num_records, sobject ) @@ -46,11 +78,12 @@ def test_random_post_process_with_records(): assert all(record["id"] in ["001", "002", "003"] for record in selected_records) -def test_random_post_process_with_fewer_records(): +def test_standard_post_process_with_fewer_records(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) records = [["001"]] num_records = 3 sobject = "Opportunity" - selected_records, error_message = random_post_process( + selected_records, error_message = select_operator.select_post_process( None, records, num_records, sobject ) @@ -62,11 +95,41 @@ def test_random_post_process_with_fewer_records(): assert selected_records.count({"id": "001", "success": True, "created": False}) == 3 +def test_standard_post_process_with_no_records(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + records = [] + num_records = 2 + sobject = "Lead" + selected_records, error_message = select_operator.select_post_process( + None, records, num_records, sobject + ) + + assert selected_records == [] + assert error_message == f"No records found for {sobject} in the target org." + + +# Test cases for Random Post Process +def test_random_post_process_with_records(): + select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) + records = [["001"], ["002"], ["003"]] + num_records = 3 + sobject = "Contact" + selected_records, error_message = select_operator.select_post_process( + None, records, num_records, sobject + ) + + assert error_message is None + assert len(selected_records) == num_records + assert all(record["success"] for record in selected_records) + assert all(record["created"] is False for record in selected_records) + + def test_random_post_process_with_no_records(): + select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) records = [] num_records = 2 sobject = "Lead" - selected_records, error_message = random_post_process( + selected_records, error_message = select_operator.select_post_process( None, records, num_records, sobject ) @@ -74,20 +137,26 @@ def test_random_post_process_with_no_records(): assert error_message == f"No records found for {sobject} in the target org." 
-# Test Cases for random_generate_query +# Test Cases for Similarity Generate Query def test_similarity_generate_query_with_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY) sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS num_records = 5 - query, fields = similarity_generate_query(sobject, ["Name"], num_records) + query, fields = select_operator.select_generate_query( + sobject, ["Name"], num_records + ) assert "WHERE" in query # Ensure WHERE clause is included assert fields == ["Id", "Name"] def test_similarity_generate_query_without_default_record_declaration(): + select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY) sobject = "Contact" # Assuming no declaration for this object num_records = 3 - query, fields = similarity_generate_query(sobject, ["Name"], num_records) + query, fields = select_operator.select_generate_query( + sobject, ["Name"], num_records + ) assert "WHERE" not in query # No WHERE clause should be present assert fields == ["Id", "Name"] @@ -198,6 +267,7 @@ def test_find_closest_record(): def test_similarity_post_process_with_records(): + select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY) num_records = 1 sobject = "Contact" load_records = [["Tom Cruise", "62", "Actor"]] @@ -207,7 +277,7 @@ def test_similarity_post_process_with_records(): ["003", "Jennifer Aniston", "30", "Actress"], ] - selected_records, error_message = similarity_post_process( + selected_records, error_message = select_operator.select_post_process( load_records, query_records, num_records, sobject ) @@ -219,10 +289,11 @@ def test_similarity_post_process_with_records(): def test_similarity_post_process_with_no_records(): + select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY) records = [] num_records = 2 sobject = "Lead" - selected_records, error_message = similarity_post_process( + selected_records, error_message = select_operator.select_post_process( None, records, num_records, sobject ) diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index 9fdee3adb0..b2904ae9c5 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -5,7 +5,7 @@ import pytest import responses -from cumulusci.core.exceptions import BulkDataException +from cumulusci.core.exceptions import BulkDataException, SOQLQueryException from cumulusci.tasks.bulkdata.load import LoadData from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import ( @@ -20,6 +20,7 @@ RestApiDmlOperation, RestApiQueryOperation, download_file, + generate_user_filter_query, get_dml_operation, get_query_operation, ) @@ -536,7 +537,7 @@ def test_get_prev_record_values(self): step.bulk.get_all_results_for_query_batch.assert_called_once_with("BATCH_ID") @mock.patch("cumulusci.tasks.bulkdata.step.download_file") - def test_select_records_random_strategy_success(self, download_mock): + def test_select_records_standard_strategy_success(self, download_mock): # Set up mock context and BulkApiDmlOperation context = mock.Mock() step = BulkApiDmlOperation( @@ -545,7 +546,7 @@ def test_select_records_random_strategy_success(self, download_mock): api_options={"batch_size": 10, "update_key": "LastName"}, context=context, fields=["LastName"], - selection_strategy=SelectStrategy.RANDOM, + selection_strategy=SelectStrategy.STANDARD, ) # Mock Bulk API responses @@ -588,7 +589,7 @@ def 
test_select_records_random_strategy_success(self, download_mock): ) @mock.patch("cumulusci.tasks.bulkdata.step.download_file") - def test_select_records_random_strategy_failure__no_records(self, download_mock): + def test_select_records_standard_strategy_failure__no_records(self, download_mock): # Set up mock context and BulkApiDmlOperation context = mock.Mock() step = BulkApiDmlOperation( @@ -597,7 +598,7 @@ def test_select_records_random_strategy_failure__no_records(self, download_mock) api_options={"batch_size": 10, "update_key": "LastName"}, context=context, fields=["LastName"], - selection_strategy=SelectStrategy.RANDOM, + selection_strategy=SelectStrategy.STANDARD, ) # Mock Bulk API responses @@ -633,6 +634,118 @@ def test_select_records_random_strategy_failure__no_records(self, download_mock) assert job_result.records_processed == 0 assert job_result.total_row_errors == 0 + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_user_selection_filter_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter='WHERE LastName in ("Sample Name")', + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO( + """Id +003000000000001 +003000000000002 +003000000000003""" + ) + # Mock the query operation + with mock.patch( + "cumulusci.tasks.bulkdata.step.get_query_operation" + ) as query_operation_mock: + query_operation_mock.return_value = mock.Mock() + query_operation_mock.return_value.query = mock.Mock() + query_operation_mock.return_value.get_results = mock.Mock() + query_operation_mock.return_value.get_results.return_value = [ + ["003000000000001"] + ] + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert ( + len(results) == 3 + ) # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_user_selection_filter_failure(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter='WHERE LastName in ("Sample Name")', + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + 
step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO( + """Id +003000000000001 +003000000000002 +003000000000003""" + ) + # Mock the query operation + with mock.patch( + "cumulusci.tasks.bulkdata.step.get_query_operation" + ) as query_operation_mock: + query_operation_mock.return_value = mock.Mock() + query_operation_mock.return_value.query = mock.Mock() + query_operation_mock.return_value.query.side_effect = BulkDataException( + "MALFORMED QUERY" + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + with pytest.raises(BulkDataException): + step.select_records(records) + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") def test_select_records_similarity_strategy_success(self, download_mock): # Set up mock context and BulkApiDmlOperation @@ -1110,7 +1223,7 @@ def test_get_prev_record_values(self): assert set(relevant_fields) == set(expected_relevant_fields) @responses.activate - def test_select_records_random_strategy_success(self): + def test_select_records_standard_strategy_success(self): mock_describe_calls() task = _make_task( LoadData, @@ -1145,7 +1258,7 @@ def test_select_records_random_strategy_success(self): api_options={"batch_size": 10, "update_key": "LastName"}, context=task, fields=["LastName"], - selection_strategy=SelectStrategy.RANDOM, + selection_strategy=SelectStrategy.STANDARD, ) results = { @@ -1154,8 +1267,8 @@ def test_select_records_random_strategy_success(self): ], "done": True, } - step.sf.query = mock.Mock() - step.sf.query.return_value = results + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results records = iter([["Test1"], ["Test2"], ["Test3"]]) step.start() step.select_records(records) @@ -1175,7 +1288,7 @@ def test_select_records_random_strategy_success(self): ) @responses.activate - def test_select_records_random_strategy_success__pagination(self): + def test_select_records_standard_strategy_failure__no_records(self): mock_describe_calls() task = _make_task( LoadData, @@ -1210,63 +1323,29 @@ def test_select_records_random_strategy_success__pagination(self): api_options={"batch_size": 10, "update_key": "LastName"}, context=task, fields=["LastName"], - selection_strategy=SelectStrategy.RANDOM, + selection_strategy=SelectStrategy.STANDARD, ) - results = { - "records": [ - {"Id": "003000000000001"}, - ], - "done": False, - "nextRecordsUrl": "https://example.com", - } - results_more = { - "records": [ - {"Id": "003000000000002"}, - {"Id": "003000000000003"}, - ], - "done": True, - } - step.sf.query = mock.Mock() - step.sf.query.return_value = results - step.sf.query_more = mock.Mock() - step.sf.query_more.return_value = results_more + results = {"records": [], "done": True} + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results records = iter([["Test1"], ["Test2"], ["Test3"]]) step.start() step.select_records(records) step.end() - # Get the results and assert their properties - results = list(step.get_results()) - assert len(results) == 3 # Expect 3 results (matching the input records count) - # Assert that all results have the expected ID, success, and created values - assert ( - results.count( - DataOperationResult( - id="003000000000001", success=True, error="", created=False - ) - ) - == 1 - ) - assert ( - results.count( - 
DataOperationResult( - id="003000000000002", success=True, error="", created=False - ) - ) - == 1 - ) + # Get the job result and assert its properties for failure scenario + job_result = step.job_result + assert job_result.status == DataOperationStatus.JOB_FAILURE assert ( - results.count( - DataOperationResult( - id="003000000000003", success=True, error="", created=False - ) - ) - == 1 + job_result.job_errors[0] + == "No records found for Contact in the target org." ) + assert job_result.records_processed == 0 + assert job_result.total_row_errors == 0 @responses.activate - def test_select_records_random_strategy_failure__no_records(self): + def test_select_records_user_selection_filter_success(self): mock_describe_calls() task = _make_task( LoadData, @@ -1301,29 +1380,56 @@ def test_select_records_random_strategy_failure__no_records(self): api_options={"batch_size": 10, "update_key": "LastName"}, context=task, fields=["LastName"], - selection_strategy=SelectStrategy.RANDOM, + selection_strategy=SelectStrategy.STANDARD, + selection_filter='WHERE LastName IN ("Sample Name")', ) - results = {"records": [], "done": True} - step.sf.query = mock.Mock() - step.sf.query.return_value = results + results = { + "compositeResponse": [ + { + "body": { + "records": [ + {"Id": "003000000000001"}, + {"Id": "003000000000002"}, + {"Id": "003000000000003"}, + ] + }, + "referenceId": "select_query", + "httpStatusCode": 200, + }, + { + "body": { + "records": [ + {"Id": "003000000000001"}, + ] + }, + "referenceId": "user_query", + "httpStatusCode": 200, + }, + ] + } + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results records = iter([["Test1"], ["Test2"], ["Test3"]]) step.start() step.select_records(records) step.end() - # Get the job result and assert its properties for failure scenario - job_result = step.job_result - assert job_result.status == DataOperationStatus.JOB_FAILURE + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values assert ( - job_result.job_errors[0] - == "No records found for Contact in the target org." 
+ results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 ) - assert job_result.records_processed == 0 - assert job_result.total_row_errors == 0 @responses.activate - def test_select_records_similarity_strategy_success(self): + def test_select_records_user_selection_filter_failure(self): mock_describe_calls() task = _make_task( LoadData, @@ -1357,74 +1463,46 @@ def test_select_records_similarity_strategy_success(self): operation=DataOperationType.UPSERT, api_options={"batch_size": 10, "update_key": "LastName"}, context=task, - fields=["Id", "Name", "Email"], - selection_strategy=SelectStrategy.SIMILARITY, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter="MALFORMED FILTER", # Applying malformed filter ) results = { - "records": [ + "compositeResponse": [ { - "Id": "003000000000001", - "Name": "Jawad", - "Email": "mjawadtp@example.com", - }, - { - "Id": "003000000000002", - "Name": "Aditya", - "Email": "aditya@example.com", + "body": { + "records": [ + {"Id": "003000000000001"}, + {"Id": "003000000000002"}, + {"Id": "003000000000003"}, + ] + }, + "referenceId": "select_query", + "httpStatusCode": 200, }, { - "Id": "003000000000003", - "Name": "Tom Cruise", - "Email": "tomcruise@example.com", + "body": [ + { + "message": "Error in MALFORMED FILTER", + "errorCode": "MALFORMED QUERY", + } + ], + "referenceId": "user_query", + "httpStatusCode": 400, }, - ], - "done": True, - } - step.sf.query = mock.Mock() - step.sf.query.return_value = results - records = iter( - [ - ["Id: 1", "Jawad", "mjawadtp@example.com"], - ["Id: 2", "Aditya", "aditya@example.com"], - ["Id: 3", "Tom Cruise", "tom@example.com"], ] - ) + } + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) step.start() - step.select_records(records) - step.end() - - # Get the results and assert their properties - results = list(step.get_results()) - assert len(results) == 3 # Expect 3 results (matching the input records count) - # Assert that all results have the expected ID, success, and created values - assert ( - results.count( - DataOperationResult( - id="003000000000001", success=True, error="", created=False - ) - ) - == 1 - ) - assert ( - results.count( - DataOperationResult( - id="003000000000002", success=True, error="", created=False - ) - ) - == 1 - ) - assert ( - results.count( - DataOperationResult( - id="003000000000003", success=True, error="", created=False - ) - ) - == 1 - ) + with pytest.raises(SOQLQueryException) as e: + step.select_records(records) + assert "MALFORMED QUERY" in str(e.value) @responses.activate - def test_select_records_random_similarity_success__pagination(self): + def test_select_records_similarity_strategy_success(self): mock_describe_calls() task = _make_task( LoadData, @@ -1469,12 +1547,6 @@ def test_select_records_random_similarity_success__pagination(self): "Name": "Jawad", "Email": "mjawadtp@example.com", }, - ], - "done": False, - "nextRecordsUrl": "https://example.com", - } - results_more = { - "records": [ { "Id": "003000000000002", "Name": "Aditya", @@ -1488,10 +1560,8 @@ def test_select_records_random_similarity_success__pagination(self): ], "done": True, } - step.sf.query = mock.Mock() - step.sf.query.return_value = results - step.sf.query_more = mock.Mock() - step.sf.query_more.return_value = results_more + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results records = iter( [ ["Id: 1", "Jawad", 
"mjawadtp@example.com"], @@ -1518,7 +1588,7 @@ def test_select_records_random_similarity_success__pagination(self): assert ( results.count( DataOperationResult( - id="003000000000001", success=True, error="", created=False + id="003000000000002", success=True, error="", created=False ) ) == 1 @@ -1526,7 +1596,7 @@ def test_select_records_random_similarity_success__pagination(self): assert ( results.count( DataOperationResult( - id="003000000000001", success=True, error="", created=False + id="003000000000003", success=True, error="", created=False ) ) == 1 @@ -1572,8 +1642,8 @@ def test_select_records_similarity_strategy_failure__no_records(self): ) results = {"records": [], "done": True} - step.sf.query = mock.Mock() - step.sf.query.return_value = results + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results records = iter( [ ["Id: 1", "Jawad", "mjawadtp@example.com"], @@ -2071,6 +2141,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): context=context, api=DataApi.BULK, volume=1, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, ) assert op == bulk_dml.return_value @@ -2080,6 +2152,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): fields=["Name"], api_options={}, context=context, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, ) op = get_dml_operation( @@ -2090,6 +2164,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): context=context, api=DataApi.REST, volume=1, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, ) assert op == rest_dml.return_value @@ -2099,6 +2175,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): fields=["Name"], api_options={}, context=context, + selection_strategy=SelectStrategy.SIMILARITY, + selection_filter=None, ) @mock.patch("cumulusci.tasks.bulkdata.step.BulkApiDmlOperation") @@ -2261,3 +2339,102 @@ def test_cleanup_date_strings__upsert_update(self, operation): "Name": "Bill", "attributes": {"type": "Test__c"}, }, json_out + + +import pytest + + +def test_generate_user_filter_query_basic(): + """Tests basic query generation without existing LIMIT or OFFSET.""" + filter_clause = "WHERE Name = 'John'" + sobject = "Account" + fields = ["Id", "Name"] + limit_clause = 10 + offset_clause = 5 + + expected_query = ( + "SELECT Id, Name FROM Account WHERE Name = 'John' LIMIT 10 OFFSET 5" + ) + assert ( + generate_user_filter_query( + filter_clause, sobject, fields, limit_clause, offset_clause + ) + == expected_query + ) + + +def test_generate_user_filter_query_existing_limit(): + """Tests handling of existing LIMIT in the filter clause.""" + filter_clause = "WHERE Name = 'John' LIMIT 20" + sobject = "Contact" + fields = ["Id", "FirstName"] + limit_clause = 5 # Should override the existing LIMIT + offset_clause = None + + expected_query = "SELECT Id, FirstName FROM Contact WHERE Name = 'John' LIMIT 5" + assert ( + generate_user_filter_query( + filter_clause, sobject, fields, limit_clause, offset_clause + ) + == expected_query + ) + + +def test_generate_user_filter_query_existing_offset(): + """Tests handling of existing OFFSET in the filter clause.""" + filter_clause = "WHERE Name = 'John' OFFSET 15" + sobject = "Opportunity" + fields = ["Id", "Name"] + limit_clause = None + offset_clause = 10 # Should add to the existing OFFSET + + expected_query = "SELECT Id, Name FROM Opportunity WHERE Name = 'John' OFFSET 25" + assert ( + generate_user_filter_query( + filter_clause, sobject, fields, limit_clause, offset_clause + ) + == expected_query + ) + + +def 
test_generate_user_filter_query_no_limit_or_offset():
+    """Tests that a LIMIT and OFFSET already present in the filter are preserved when none are provided."""
+    filter_clause = "WHERE Name = 'John' LIMIT 5 OFFSET 20"
+    sobject = "Lead"
+    fields = ["Id", "Name", "Email"]
+    limit_clause = None
+    offset_clause = None
+
+    expected_query = (
+        "SELECT Id, Name, Email FROM Lead WHERE Name = 'John' LIMIT 5 OFFSET 20"
+    )
+    assert (
+        generate_user_filter_query(
+            filter_clause, sobject, fields, limit_clause, offset_clause
+        )
+        == expected_query
+    )
+
+
+def test_generate_user_filter_query_case_insensitivity():
+    """Tests case-insensitivity for LIMIT and OFFSET."""
+    filter_clause = "where name = 'John' offset 5 limit 20"
+    sobject = "Task"
+    fields = ["Id", "Subject"]
+    limit_clause = 15
+    offset_clause = 20
+
+    expected_query = (
+        "SELECT Id, Subject FROM Task where name = 'John' LIMIT 15 OFFSET 25"
+    )
+    assert (
+        generate_user_filter_query(
+            filter_clause, sobject, fields, limit_clause, offset_clause
+        )
+        == expected_query
+    )

From 196247a27a93e69a5356c9ad9a57468e3e59f751 Mon Sep 17 00:00:00 2001
From: aditya-balachander
Date: Tue, 3 Sep 2024 11:57:37 +0530
Subject: [PATCH 15/65] Add limit and offset to queries under batch processing

---
 cumulusci/tasks/bulkdata/select_utils.py      | 25 ++++++----
 cumulusci/tasks/bulkdata/step.py              | 46 +++++++++++++------
 .../tasks/bulkdata/tests/test_select_utils.py | 46 ++++++++++++-------
 3 files changed, 79 insertions(+), 38 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py
index 976f852540..6b9623eb59 100644
--- a/cumulusci/tasks/bulkdata/select_utils.py
+++ b/cumulusci/tasks/bulkdata/select_utils.py
@@ -20,17 +20,23 @@ def __init__(self, strategy: SelectStrategy):
         self.strategy = strategy
 
     def select_generate_query(
-        self, sobject: str, fields: T.List[str], num_records: int
+        self,
+        sobject: str,
+        fields: T.List[str],
+        limit: T.Union[int, None],
+        offset: T.Union[int, None],
     ):
         # For STANDARD strategy
         if self.strategy == SelectStrategy.STANDARD:
-            return standard_generate_query(sobject=sobject, num_records=num_records)
+            return standard_generate_query(sobject=sobject, limit=limit, offset=offset)
         # For SIMILARITY strategy
        elif self.strategy == SelectStrategy.SIMILARITY:
-            return similarity_generate_query(sobject=sobject, fields=fields)
+            return similarity_generate_query(
+                sobject=sobject, fields=fields, limit=limit, offset=offset
+            )
         # For RANDOM strategy
         elif self.strategy == SelectStrategy.RANDOM:
-            return standard_generate_query(sobject=sobject, num_records=num_records)
+            return standard_generate_query(sobject=sobject, limit=limit, offset=offset)
 
     def select_post_process(
         self, load_records, query_records: list, num_records: int, sobject: str
@@ -53,7 +59,7 @@ def select_post_process(
 
 
 def standard_generate_query(
-    sobject: str, num_records: int
+    sobject: str, limit: T.Union[int, None], offset: T.Union[int, None]
 ) -> T.Tuple[str, T.List[str]]:
     """Generates the SOQL query for the standard (as well as random) selection strategy"""
     # Get the WHERE clause from DEFAULT_DECLARATIONS if available
@@ -66,8 +72,8 @@ def standard_generate_query(
     query = f"SELECT Id FROM {sobject}"
     if where_clause:
         query += f" WHERE {where_clause}"
-    query += f" LIMIT {num_records}"
-
+    query += f" LIMIT {limit}" if limit else ""
+    query += f" OFFSET {offset}" if offset else ""
     return query, ["Id"]
@@ -98,6 +104,8 @@ def 
standard_post_process( def similarity_generate_query( sobject: str, fields: T.List[str], + limit: T.Union[int, None], + offset: T.Union[int, None], ) -> T.Tuple[str, T.List[str]]: """Generates the SOQL query for the similarity selection strategy""" # Get the WHERE clause from DEFAULT_DECLARATIONS if available @@ -114,7 +122,8 @@ def similarity_generate_query( query = f"SELECT {fields_to_query} FROM {sobject}" if where_clause: query += f" WHERE {where_clause}" - + query += f" LIMIT {limit}" if limit else "" + query += f" OFFSET {offset}" if offset else "" return query, fields diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index fd25f0e19d..61f23a5808 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -462,16 +462,21 @@ def select_records(self, records): ) # Generate and execute SOQL query + # (not passing offset as it is not supported in Bulk) ( select_query, query_fields, ) = self.select_operation_executor.select_generate_query( - self.sobject, self.fields, num_records + sobject=self.sobject, fields=self.fields, limit=num_records, offset=None ) if self.selection_filter: # Generate user filter query if selection_filter is present (offset clause not supported) user_query = generate_user_filter_query( - self.selection_filter, self.sobject, ["Id"], num_records, None + filter_clause=self.selection_filter, + sobject=self.sobject, + fields=["Id"], + limit_clause=num_records, + offset_clause=None, ) # Execute the user query using Bulk API user_query_executor = get_query_operation( @@ -508,19 +513,22 @@ def select_records(self, records): selected_records, error_message, ) = self.select_operation_executor.select_post_process( - records, query_records, num_records, self.sobject + load_records=records, + query_records=query_records, + num_records=num_records, + sobject=self.sobject, ) if not error_message: self.select_results.extend(selected_records) # Update job result based on selection outcome self.job_result = DataOperationJobResult( - DataOperationStatus.SUCCESS + status=DataOperationStatus.SUCCESS if len(self.select_results) else DataOperationStatus.JOB_FAILURE, - [error_message] if error_message else [], - len(self.select_results), - 0, + job_errors=[error_message] if error_message else [], + records_processed=len(self.select_results), + total_row_errors=0, ) def _execute_select_query(self, select_query: str, query_fields: List[str]): @@ -814,13 +822,20 @@ def convert(rec, fields): select_query, query_fields, ) = self.select_operation_executor.select_generate_query( - self.sobject, self.fields, num_records + sobject=self.sobject, + fields=self.fields, + limit=num_records, + offset=offset, ) # If user given selection filter present, create composite request if self.selection_filter: user_query = generate_user_filter_query( - self.selection_filter, self.sobject, ["Id"], num_records, offset + filter_clause=self.selection_filter, + sobject=self.sobject, + fields=["Id"], + limit_clause=num_records, + offset_clause=offset, ) query_records.extend( self._execute_composite_query( @@ -843,7 +858,10 @@ def convert(rec, fields): selected_records, error_message, ) = self.select_operation_executor.select_post_process( - records, query_records, total_num_records, self.sobject + load_records=records, + query_records=query_records, + num_records=total_num_records, + sobject=self.sobject, ) if not error_message: # Add selected records from this batch to the overall results @@ -851,12 +869,12 @@ def convert(rec, fields): # Update the job 
result based on the overall selection outcome self.job_result = DataOperationJobResult( - DataOperationStatus.SUCCESS + status=DataOperationStatus.SUCCESS if len(self.results) # Check the overall results length else DataOperationStatus.JOB_FAILURE, - [error_message] if error_message else [], - len(self.results), - 0, + job_errors=[error_message] if error_message else [], + records_processed=len(self.results), + total_row_errors=0, ) def _execute_composite_query(self, select_query, user_query, query_fields): diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index 0ae97acb46..755af5d009 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -11,26 +11,30 @@ def test_standard_generate_query_with_default_record_declaration(): select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS - num_records = 5 + limit = 5 + offset = 2 query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], num_records=num_records + sobject=sobject, fields=[], limit=limit, offset=offset ) assert "WHERE" in query # Ensure WHERE clause is included - assert f"LIMIT {num_records}" in query + assert f"LIMIT {limit}" in query + assert f"OFFSET {offset}" in query assert fields == ["Id"] def test_standard_generate_query_without_default_record_declaration(): select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) sobject = "Contact" # Assuming no declaration for this object - num_records = 3 + limit = 3 + offset = None query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], num_records=num_records + sobject=sobject, fields=[], limit=limit, offset=offset ) assert "WHERE" not in query # No WHERE clause should be present - assert f"LIMIT {num_records}" in query + assert f"LIMIT {limit}" in query + assert "OFFSET" not in query assert fields == ["Id"] @@ -38,26 +42,30 @@ def test_standard_generate_query_without_default_record_declaration(): def test_random_generate_query_with_default_record_declaration(): select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) sobject = "Account" # Assuming Account has a declaration in DEFAULT_DECLARATIONS - num_records = 5 + limit = 5 + offset = 2 query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], num_records=num_records + sobject=sobject, fields=[], limit=limit, offset=offset ) assert "WHERE" in query # Ensure WHERE clause is included - assert f"LIMIT {num_records}" in query + assert f"LIMIT {limit}" in query + assert f"OFFSET {offset}" in query assert fields == ["Id"] def test_random_generate_query_without_default_record_declaration(): select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) sobject = "Contact" # Assuming no declaration for this object - num_records = 3 + limit = 3 + offset = None query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], num_records=num_records + sobject=sobject, fields=[], limit=limit, offset=offset ) assert "WHERE" not in query # No WHERE clause should be present - assert f"LIMIT {num_records}" in query + assert f"LIMIT {limit}" in query + assert "OFFSET" not in query assert fields == ["Id"] @@ -141,25 +149,31 @@ def test_random_post_process_with_no_records(): def test_similarity_generate_query_with_default_record_declaration(): select_operator = 
SelectOperationExecutor(SelectStrategy.SIMILARITY)
     sobject = "Account"  # Assuming Account has a declaration in DEFAULT_DECLARATIONS
-    num_records = 5
+    limit = 5
+    offset = 2
     query, fields = select_operator.select_generate_query(
-        sobject, ["Name"], num_records
+        sobject, ["Name"], limit, offset
     )
 
     assert "WHERE" in query  # Ensure WHERE clause is included
     assert fields == ["Id", "Name"]
+    assert f"LIMIT {limit}" in query
+    assert f"OFFSET {offset}" in query
 
 
 def test_similarity_generate_query_without_default_record_declaration():
     select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
     sobject = "Contact"  # Assuming no declaration for this object
-    num_records = 3
+    limit = 3
+    offset = None
     query, fields = select_operator.select_generate_query(
-        sobject, ["Name"], num_records
+        sobject, ["Name"], limit, offset
     )
 
     assert "WHERE" not in query  # No WHERE clause should be present
     assert fields == ["Id", "Name"]
+    assert f"LIMIT {limit}" in query
+    assert "OFFSET" not in query
 
 
 def test_levenshtein_distance():

From 6eca45517557b87f3008483bf512d452d148c0ce Mon Sep 17 00:00:00 2001
From: aditya-balachander
Date: Tue, 3 Sep 2024 12:37:53 +0530
Subject: [PATCH 16/65] Add failure scenario for calculate_levenshtein_distance

---
 cumulusci/tasks/bulkdata/load.py              |  1 +
 .../tasks/bulkdata/tests/test_select_utils.py | 16 ++++++++++++++++
 2 files changed, 17 insertions(+)

diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py
index 9435dfd183..d416fa1f63 100644
--- a/cumulusci/tasks/bulkdata/load.py
+++ b/cumulusci/tasks/bulkdata/load.py
@@ -342,6 +342,7 @@ def configure_step(self, mapping):
                 api_options["update_key"] = mapping.update_key[0]
             action = DataOperationType.UPSERT
         elif mapping.action == DataOperationType.SELECT:
+            # Bulk process expects DataOperationType to be QUERY
             action = DataOperationType.QUERY
diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py
index 755af5d009..fe037a0177 100644
--- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py
+++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py
@@ -1,3 +1,5 @@
+import pytest
+
 from cumulusci.tasks.bulkdata.select_utils import (
     SelectOperationExecutor,
     SelectStrategy,
@@ -215,6 +217,20 @@ def test_calculate_levenshtein_distance():
     assert calculate_levenshtein_distance(record1, record2) == 0  # Distance should be 0
 
 
+def test_calculate_levenshtein_distance_error():
+    # Records whose field counts do not match
+    record1 = ["Tom Cruise", "24", "Actor"]
+    record2 = [
+        "Tom Cruise",
+        "24",
+        "Actor",
+        "SomethingElse",
+    ]  # Record length does not match
+    with pytest.raises(ValueError) as e:
+        calculate_levenshtein_distance(record1, record2)
+    assert "Records must have the same number of fields" in str(e.value)
+
+
 def test_find_closest_record():

From ebd5f08503e856501daf9c34c581be195250c2ba Mon Sep 17 00:00:00 2001
From: aditya-balachander
Date: Wed, 4 Sep 2024 11:58:51 +0530
Subject: [PATCH 17/65] Modify functionality to return records in order of the
 user query to support ORDER BY operation

---
 cumulusci/tasks/bulkdata/step.py            |  27 +++-
 cumulusci/tasks/bulkdata/tests/test_step.py | 145 ++++++++++++++++++++
 2 files changed, 166 insertions(+), 6 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py
index 61f23a5808..7dd025c345 100644
--- a/cumulusci/tasks/bulkdata/step.py
+++ b/cumulusci/tasks/bulkdata/step.py
@@ -491,7 +491,11 
@@ def select_records(self, records): user_query_records = user_query_executor.get_results() # Find intersection based on 'Id' - user_query_ids = set(record[0] for record in user_query_records) + user_query_ids = ( + list(record[0] for record in user_query_records) + if user_query_records + else [] + ) # Execute the main select query using Bulk API select_query_records = self._execute_select_query( @@ -500,10 +504,16 @@ def select_records(self, records): # If user_query_ids exist, filter select_query_records based on the intersection of Ids if self.selection_filter: + # Create a dictionary to map IDs to their corresponding records + id_to_record_map = { + record[query_fields.index("Id")]: record + for record in select_query_records + } + # Extend query_records in the order of user_query_ids query_records.extend( record - for record in select_query_records - if record[query_fields.index("Id")] in user_query_ids + for id in user_query_ids + if (record := id_to_record_map.get(id)) is not None ) else: query_records.extend(select_query_records) @@ -928,12 +938,17 @@ def convert(rec, fields): f"{sub_response['body'][0]['errorCode']}: {sub_response['body'][0]['message']}" ) # Find intersection based on 'Id' - user_query_ids = set(record[0] for record in user_query_records) + user_query_ids = list(record[0] for record in user_query_records) + # Create a dictionary to map IDs to their corresponding records + id_to_record_map = { + record[query_fields.index("Id")]: record for record in select_query_records + } + # Extend query_records in the order of user_query_ids return [ record - for record in select_query_records - if record[query_fields.index("Id")] in user_query_ids + for id in user_query_ids + if (record := id_to_record_map.get(id)) is not None ] def get_results(self): diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index b2904ae9c5..046c6d3a5a 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -701,6 +701,70 @@ def test_select_records_user_selection_filter_success(self, download_mock): == 3 ) + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_user_selection_filter_order_success(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter="ORDER BY CreatedDate", + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + download_mock.return_value = io.StringIO( + """Id +003000000000001 +003000000000002 +003000000000003""" + ) + # Mock the query operation + with mock.patch( + "cumulusci.tasks.bulkdata.step.get_query_operation" + ) as query_operation_mock: + query_operation_mock.return_value = mock.Mock() + query_operation_mock.return_value.query = mock.Mock() + query_operation_mock.return_value.get_results = mock.Mock() + query_operation_mock.return_value.get_results.return_value = [ + ["003000000000003"], + ["003000000000001"], + ["003000000000002"], + ] + + # Mock the _wait_for_job method to simulate a successful job + 
step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) + + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert ( + len(results) == 3 + ) # Expect 3 results (matching the input records count) + # Assert that all results are in the order given by user query + assert results[0].id == "003000000000003" + assert results[1].id == "003000000000001" + assert results[2].id == "003000000000002" + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") def test_select_records_user_selection_filter_failure(self, download_mock): # Set up mock context and BulkApiDmlOperation @@ -1428,6 +1492,87 @@ def test_select_records_user_selection_filter_success(self): == 3 ) + @responses.activate + def test_select_records_user_selection_filter_order_success(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + selection_filter="ORDER BY CreatedDate", + ) + + results = { + "compositeResponse": [ + { + "body": { + "records": [ + {"Id": "003000000000001"}, + {"Id": "003000000000002"}, + {"Id": "003000000000003"}, + ] + }, + "referenceId": "select_query", + "httpStatusCode": 200, + }, + { + "body": { + "records": [ + {"Id": "003000000000003"}, + {"Id": "003000000000001"}, + {"Id": "003000000000002"}, + ] + }, + "referenceId": "user_query", + "httpStatusCode": 200, + }, + ] + } + step.sf.restful = mock.Mock() + step.sf.restful.return_value = results + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results are in the order of user_query + assert results[0].id == "003000000000003" + assert results[1].id == "003000000000001" + assert results[2].id == "003000000000002" + @responses.activate def test_select_records_user_selection_filter_failure(self): mock_describe_calls() From 8c2bb3adb04ee2a0710b08c38b2ebc67ae997492 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Fri, 6 Sep 2024 15:39:22 +0530 Subject: [PATCH 18/65] Add documentation for 'select' functionality --- docs/data.md | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/docs/data.md b/docs/data.md index 063e3f33f5..c81ba44c90 100644 --- a/docs/data.md +++ b/docs/data.md @@ -250,6 +250,43 @@ Insert 
Accounts:
 
 Whenever `update_key` is supplied, the action must be `upsert` and
 vice versa.
 
+### Selects
+
+The "select" functionality lets a mapping step select existing records
+directly from the target Salesforce org instead of inserting new ones, so
+that subsequent steps can reference them through lookups. Specify the
+`select` action in the mapping file; this is especially useful when an
+object depends on Salesforce objects that cannot be inserted directly.
+
+```yaml
+Select Accounts:
+    sf_object: Account
+    action: select
+    selection_strategy: standard
+    selection_filter: WHERE Name IN ('Bluth Company', 'Camacho PLC')
+    fields:
+        - Name
+        - AccountNumber
+Insert Contacts:
+    sf_object: Contact
+    action: insert
+    fields:
+        - LastName
+    lookups:
+        AccountId:
+            table: Account
+```
+
+The `Select Accounts` step in this YAML fetches specific records from the
+target org. The selected Account records are then referenced by the
+subsequent `Insert Contacts` step via its lookups, so the new Contacts are
+linked to the pre-existing Accounts chosen in the `select` step rather than
+to any newly inserted Account records.
+
+#### Selection Strategy
+
+The `selection_strategy` dictates how these records are chosen:
+
+- `standard`: Fetches records from the org in the order in which they
+  appear, respecting any filtering applied via `selection_filter`.
+- `similarity`: Selects the org records that most closely resemble the
+  records defined in your local SQL file.
+- `random`: Selects records from the org at random.
+
+#### Selection Filter
+
+The `selection_filter` is a SOQL clause appended to the generated query. It
+can filter with `WHERE`, sort with `ORDER BY`, and cap results with `LIMIT`,
+giving you fine-grained control over which records are selected for your
+chosen `selection_strategy`.
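+
+For example, a strategy and a filter can be combined in one step. The sketch
+below is illustrative only (the filter values and fields are hypothetical,
+not part of the documented example): it selects the org Accounts that most
+closely resemble the rows in the local dataset, while considering only
+recently created records.
+
+```yaml
+Select Similar Accounts:
+    sf_object: Account
+    action: select
+    selection_strategy: similarity
+    selection_filter: WHERE CreatedDate = LAST_N_DAYS:30 ORDER BY CreatedDate DESC LIMIT 100
+    fields:
+        - Name
+        - AccountNumber
+```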
+ ### Database Mapping CumulusCI's definition format includes considerable flexibility for use From bb72bfb6c227d713867165ad49ef7832793f6151 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Mon, 30 Sep 2024 16:20:22 +0530 Subject: [PATCH 19/65] Fixes issue for improper batching and intersection --- cumulusci/tasks/bulkdata/select_utils.py | 19 +- cumulusci/tasks/bulkdata/step.py | 221 +++++++++++--------- cumulusci/tasks/bulkdata/tests/test_step.py | 15 +- 3 files changed, 152 insertions(+), 103 deletions(-) diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index 6b9623eb59..315e2ae349 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -15,9 +15,22 @@ class SelectStrategy(StrEnum): RANDOM = "random" +class SelectRecordRetrievalMode(StrEnum): + """Enum defining whether you need all records or match the + number of records of the local sql file""" + + ALL = "all" + MATCH = "match" + + class SelectOperationExecutor: def __init__(self, strategy: SelectStrategy): self.strategy = strategy + self.retrieval_mode = ( + SelectRecordRetrievalMode.ALL + if strategy == SelectStrategy.SIMILARITY + else SelectRecordRetrievalMode.MATCH + ) def select_generate_query( self, @@ -96,7 +109,7 @@ def standard_post_process( original_records = selected_records.copy() while len(selected_records) < num_records: selected_records.extend(original_records) - selected_records = selected_records[:num_records] + selected_records = selected_records[:num_records] return selected_records, None # Return selected records and None for error @@ -115,8 +128,8 @@ def similarity_generate_query( else: where_clause = None # Construct the query with the WHERE clause (if it exists) - - fields.insert(0, "Id") + if "Id" not in fields: + fields.insert(0, "Id") fields_to_query = ", ".join(field for field in fields if field) query = f"SELECT {fields_to_query} FROM {sobject}" diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 7dd025c345..5f100aa88d 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -19,6 +19,7 @@ from cumulusci.core.utils import process_bool_arg from cumulusci.tasks.bulkdata.select_utils import ( SelectOperationExecutor, + SelectRecordRetrievalMode, SelectStrategy, ) from cumulusci.tasks.bulkdata.utils import DataApi, iterate_in_chunks @@ -452,71 +453,66 @@ def select_records(self, records): # Count total number of records to fetch using the copy total_num_records = sum(1 for _ in records_copy) - # Process in batches based on batch_size from api_options - for offset in range( - 0, total_num_records, self.api_options.get("batch_size", 500) - ): - # Calculate number of records to fetch in this batch - num_records = min( - self.api_options.get("batch_size", 500), total_num_records - offset + # Since OFFSET is not supported in bulk, we can run only over 1 api_batch_size + # Generate and execute SOQL query + # (not passing offset as it is not supported in Bulk) + ( + select_query, + query_fields, + ) = self.select_operation_executor.select_generate_query( + sobject=self.sobject, + fields=self.fields, + limit=self.api_options.get("batch_size", 500), + offset=None, + ) + if self.selection_filter: + # Generate user filter query if selection_filter is present (offset clause not supported) + user_query = generate_user_filter_query( + filter_clause=self.selection_filter, + sobject=self.sobject, + fields=["Id"], + 
limit_clause=self.api_options.get("batch_size", 500), + offset_clause=None, ) - - # Generate and execute SOQL query - # (not passing offset as it is not supported in Bulk) - ( - select_query, - query_fields, - ) = self.select_operation_executor.select_generate_query( - sobject=self.sobject, fields=self.fields, limit=num_records, offset=None + # Execute the user query using Bulk API + user_query_executor = get_query_operation( + sobject=self.sobject, + fields=["Id"], + api_options=self.api_options, + context=self, + query=user_query, + api=DataApi.BULK, ) - if self.selection_filter: - # Generate user filter query if selection_filter is present (offset clause not supported) - user_query = generate_user_filter_query( - filter_clause=self.selection_filter, - sobject=self.sobject, - fields=["Id"], - limit_clause=num_records, - offset_clause=None, - ) - # Execute the user query using Bulk API - user_query_executor = get_query_operation( - sobject=self.sobject, - fields=["Id"], - api_options=self.api_options, - context=self, - query=user_query, - api=DataApi.BULK, - ) - user_query_executor.query() - user_query_records = user_query_executor.get_results() - - # Find intersection based on 'Id' - user_query_ids = ( - list(record[0] for record in user_query_records) - if user_query_records - else [] - ) - - # Execute the main select query using Bulk API - select_query_records = self._execute_select_query( - select_query=select_query, query_fields=query_fields + user_query_executor.query() + user_query_records = user_query_executor.get_results() + + # Find intersection based on 'Id' + user_query_ids = ( + list(record[0] for record in user_query_records) + if user_query_records + else [] ) - # If user_query_ids exist, filter select_query_records based on the intersection of Ids - if self.selection_filter: - # Create a dictionary to map IDs to their corresponding records - id_to_record_map = { - record[query_fields.index("Id")]: record - for record in select_query_records - } - # Extend query_records in the order of user_query_ids - query_records.extend( - record - for id in user_query_ids - if (record := id_to_record_map.get(id)) is not None - ) - else: - query_records.extend(select_query_records) + # Execute the main select query using Bulk API + select_query_records = self._execute_select_query( + select_query=select_query, query_fields=query_fields + ) + + # If user_query_ids exist, filter select_query_records based on the intersection of Ids + if self.selection_filter: + # Create a dictionary to map IDs to their corresponding records + id_to_record_map = { + record[query_fields.index("Id")]: record + for record in select_query_records + } + # Extend query_records in the order of user_query_ids + query_records.extend( + record + for id in user_query_ids + if (record := id_to_record_map.get(id)) is not None + ) + else: + query_records.extend(select_query_records) # Post-process the query results ( @@ -525,7 +521,7 @@ def select_records(self, records): ) = self.select_operation_executor.select_post_process( load_records=records, query_records=query_records, - num_records=num_records, + num_records=total_num_records, sobject=self.sobject, ) if not error_message: @@ -674,7 +670,7 @@ def __init__( api_options, context, fields, - selection_strategy=SelectStrategy.SIMILARITY, + selection_strategy=SelectStrategy.STANDARD, selection_filter=None, ): super().__init__( @@ -816,17 +812,25 @@ def convert(rec, fields): self.results = [] query_records = [] + user_query_records = [] # Create a copy of the generator 
using tee records, records_copy = tee(records) # Count total number of records to fetch using the copy total_num_records = sum(1 for _ in records_copy) + # Set offset + offset = 0 - # Process in batches - for offset in range(0, total_num_records, self.api_options.get("batch_size")): - num_records = min( - self.api_options.get("batch_size"), total_num_records - offset - ) + # Define condition + def condition(retrieval_mode, offset, total_num_records): + if retrieval_mode == SelectRecordRetrievalMode.ALL: + return True + elif retrieval_mode == SelectRecordRetrievalMode.MATCH: + return offset < total_num_records + # Process in batches + while condition( + self.select_operation_executor.retrieval_mode, offset, total_num_records + ): # Generate the SOQL query based on the selection strategy ( select_query, @@ -834,34 +838,74 @@ def convert(rec, fields): ) = self.select_operation_executor.select_generate_query( sobject=self.sobject, fields=self.fields, - limit=num_records, + limit=self.api_options.get("batch_size"), offset=offset, ) # If user given selection filter present, create composite request if self.selection_filter: + # Generate user query user_query = generate_user_filter_query( filter_clause=self.selection_filter, sobject=self.sobject, fields=["Id"], - limit_clause=num_records, + limit_clause=self.api_options.get("batch_size"), offset_clause=offset, ) - query_records.extend( - self._execute_composite_query( - select_query=select_query, - user_query=user_query, - query_fields=query_fields, - ) + # Execute composite query + ( + current_user_query_records, + current_query_records, + ) = self._execute_composite_query( + select_query=select_query, + user_query=user_query, + query_fields=query_fields, ) + # Break if org has no more records + if ( + len(current_user_query_records) == 0 + and len(current_query_records) == 0 + ): + break + + # Extend to each + user_query_records.extend(current_user_query_records) + query_records.extend(current_query_records) + else: # Handle the case where self.selection_query is None (and hence user_query is also None) response = self.sf.restful( requests.utils.requote_uri(f"query/?q={select_query}"), method="GET" ) - query_records.extend( - list(convert(rec, query_fields) for rec in response["records"]) + current_query_records = list( + convert(rec, query_fields) for rec in response["records"] ) + # Break if nothing is returned + if len(current_query_records) == 0: + break + # Extend the query records + query_records.extend(current_query_records) + + # Update offset + offset += self.api_options.get("batch_size") + + # Find intersection if filter given + if self.selection_filter: + # Find intersection based on 'Id' + user_query_ids = list(record[0] for record in user_query_records) + # Create a dictionary to map IDs to their corresponding records + id_to_record_map = { + record[query_fields.index("Id")]: record for record in query_records + } + + # Extend insersection_query_records in the order of user_query_ids + insersection_query_records = [ + record + for id in user_query_ids + if (record := id_to_record_map.get(id)) is not None + ] + else: + insersection_query_records = query_records # Post-process the query results for this batch ( @@ -869,7 +913,7 @@ def convert(rec, fields): error_message, ) = self.select_operation_executor.select_post_process( load_records=records, - query_records=query_records, + query_records=insersection_query_records, num_records=total_num_records, sobject=self.sobject, ) @@ -888,7 +932,7 @@ def convert(rec, fields): ) def 
_execute_composite_query(self, select_query, user_query, query_fields):
-        """Executes a composite request with two queries and returns the intersected results."""
+        """Executes a composite request with two queries and returns the results."""
 
         def convert(rec, fields):
             """Helper function to convert record values to strings, handling None values"""
@@ -937,19 +981,8 @@ def convert(rec, fields):
                 raise SOQLQueryException(
                     f"{sub_response['body'][0]['errorCode']}: {sub_response['body'][0]['message']}"
                 )
-        # Find intersection based on 'Id'
-        user_query_ids = list(record[0] for record in user_query_records)
 
-        # Create a dictionary to map IDs to their corresponding records
-        id_to_record_map = {
-            record[query_fields.index("Id")]: record for record in select_query_records
-        }
-        # Extend query_records in the order of user_query_ids
-        return [
-            record
-            for id in user_query_ids
-            if (record := id_to_record_map.get(id)) is not None
-        ]
+        return user_query_records, select_query_records
 
     def get_results(self):
         """Return a generator of DataOperationResult objects."""
@@ -1076,8 +1109,8 @@ def generate_user_filter_query(
     filter_clause: str,
     sobject: str,
     fields: list,
-    limit_clause: Union[int, None] = None,
-    offset_clause: Union[int, None] = None,
+    limit_clause: Union[float, None] = None,
+    offset_clause: Union[float, None] = None,
 ) -> str:
     """
     Generates a SOQL query with the provided filter, object, fields, limit, and offset clauses.
diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py
index 046c6d3a5a..c182a92996 100644
--- a/cumulusci/tasks/bulkdata/tests/test_step.py
+++ b/cumulusci/tasks/bulkdata/tests/test_step.py
@@ -1685,7 +1685,7 @@ def test_select_records_similarity_strategy_success(self):
             selection_strategy=SelectStrategy.SIMILARITY,
         )
 
-        results = {
+        results_first_call = {
             "records": [
                 {
                     "Id": "003000000000001",
@@ -1705,13 +1705,16 @@ def test_select_records_similarity_strategy_success(self):
             ],
             "done": True,
         }
-        step.sf.restful = mock.Mock()
-        step.sf.restful.return_value = results
+
+        # First call returns `results_first_call`, second call returns an empty list
+        step.sf.restful = mock.Mock(
+            side_effect=[results_first_call, {"records": [], "done": True}]
+        )
         records = iter(
             [
-                ["Id: 1", "Jawad", "mjawadtp@example.com"],
-                ["Id: 2", "Aditya", "aditya@example.com"],
-                ["Id: 3", "Tom Cruise", "tom@example.com"],
+                ["Jawad", "mjawadtp@example.com"],
+                ["Aditya", "aditya@example.com"],
+                ["Tom Cruise", "tom@example.com"],
             ]
         )
         step.start()

From ada400f9d9cfd57193a33f9b5e45b57ee41f9879 Mon Sep 17 00:00:00 2001
From: aditya-balachander
Date: Tue, 1 Oct 2024 12:45:29 +0530
Subject: [PATCH 20/65] Override user filter with our filters

Also fixes an issue where an offset greater than 2000 caused an error
---
 cumulusci/tasks/bulkdata/load.py         |  19 +-
 cumulusci/tasks/bulkdata/select_utils.py | 108 ++++++++---
 cumulusci/tasks/bulkdata/step.py         | 235 +++++------------------
 3 files changed, 154 insertions(+), 208 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py
index d416fa1f63..f83199050a 100644
--- a/cumulusci/tasks/bulkdata/load.py
+++ b/cumulusci/tasks/bulkdata/load.py
@@ -313,6 +313,7 @@ def configure_step(self, mapping):
         """Create a step appropriate to the action"""
         bulk_mode = mapping.bulk_mode or self.bulk_mode or "Parallel"
         api_options = {"batch_size": mapping.batch_size, "bulk_mode": bulk_mode}
+        num_records_in_target = None
 
         fields = mapping.get_load_field_list()
 
@@ -344,11 +345,27 @@ def configure_step(self, mapping):
         elif mapping.action == DataOperationType.SELECT:
             # Bulk process expects DataOperationType to be QUERY
             action = DataOperationType.QUERY
+            # Determine number of records in the target org
+            record_count_response = self.sf.restful(
+                f"limits/recordCount?sObjects={mapping.sf_object}"
+            )
+            sobject_map = {
+                entry["name"]: entry["count"]
+                for entry in record_count_response["sObjects"]
+            }
+            num_records_in_target = sobject_map.get(mapping.sf_object, None)
         else:
             action = mapping.action
 
         query = self._query_db(mapping)
 
+        # Set volume
+        volume = (
+            num_records_in_target
+            if num_records_in_target is not None
+            else query.count()
+        )
+
         step = get_dml_operation(
             sobject=mapping.sf_object,
             operation=action,
@@ -356,7 +373,7 @@ def configure_step(self, mapping):
             context=self,
             fields=fields,
             api=mapping.api,
-            volume=query.count(),
+            volume=volume,
             selection_strategy=mapping.selection_strategy,
             selection_filter=mapping.selection_filter,
         )
diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py
index 315e2ae349..daa993d045 100644
--- a/cumulusci/tasks/bulkdata/select_utils.py
+++ b/cumulusci/tasks/bulkdata/select_utils.py
@@ -1,4 +1,5 @@
 import random
+import re
 import typing as T
 
 from cumulusci.core.enums import StrEnum
@@ -36,20 +37,29 @@ def select_generate_query(
         self,
         sobject: str,
         fields: T.List[str],
+        user_filter: str,
         limit: T.Union[int, None],
         offset: T.Union[int, None],
     ):
         # For STANDARD strategy
         if self.strategy == SelectStrategy.STANDARD:
-            return standard_generate_query(sobject=sobject, limit=limit, offset=offset)
+            return standard_generate_query(
+                sobject=sobject, user_filter=user_filter, limit=limit, offset=offset
+            )
         # For SIMILARITY strategy
        elif self.strategy == SelectStrategy.SIMILARITY:
             return similarity_generate_query(
-                sobject=sobject, fields=fields, limit=limit, offset=offset
+                sobject=sobject,
+                fields=fields,
+                user_filter=user_filter,
+                limit=limit,
+                offset=offset,
             )
         # For RANDOM strategy
         elif self.strategy == SelectStrategy.RANDOM:
-            return standard_generate_query(sobject=sobject, limit=limit, offset=offset)
+            return standard_generate_query(
+                sobject=sobject, user_filter=user_filter, limit=limit, offset=offset
+            )
 
     def select_post_process(
         self, load_records, query_records: list, num_records: int, sobject: str
@@ -72,21 +82,26 @@ def standard_generate_query(
-    sobject: str, limit: T.Union[int, None], offset: T.Union[int, None]
+    sobject: str,
+    user_filter: str,
+    limit: T.Union[int, None],
+    offset: T.Union[int, None],
 ) -> T.Tuple[str, T.List[str]]:
     """Generates the SOQL query for the standard (as well as random) selection strategy"""
-    # Get the WHERE clause from DEFAULT_DECLARATIONS if available
-    declaration = DEFAULT_DECLARATIONS.get(sobject)
-    if declaration:
-        where_clause = declaration.where
-    else:
-        where_clause = None
-    # Construct the query with the WHERE clause (if it exists)
+
     query = f"SELECT Id FROM {sobject}"
-    if where_clause:
-        query += f" WHERE {where_clause}"
-    query += f" LIMIT {limit}" if limit else ""
-    query += f" OFFSET {offset}" if offset else ""
+    # If user specifies user_filter
+    if user_filter:
+        query += add_limit_offset_to_user_filter(
+            filter_clause=user_filter, limit_clause=limit, offset_clause=offset
+        )
+    else:
+        # Get the WHERE clause from DEFAULT_DECLARATIONS if available
+        declaration = DEFAULT_DECLARATIONS.get(sobject)
+        if declaration:
+            query += f" WHERE {declaration.where}"
+        query += f" LIMIT {limit}" if limit else ""
+        query += f" 
OFFSET {offset}" if offset else "" return query, ["Id"] @@ -117,26 +132,29 @@ def standard_post_process( def similarity_generate_query( sobject: str, fields: T.List[str], + user_filter: str, limit: T.Union[int, None], offset: T.Union[int, None], ) -> T.Tuple[str, T.List[str]]: """Generates the SOQL query for the similarity selection strategy""" - # Get the WHERE clause from DEFAULT_DECLARATIONS if available - declaration = DEFAULT_DECLARATIONS.get(sobject) - if declaration: - where_clause = declaration.where - else: - where_clause = None # Construct the query with the WHERE clause (if it exists) if "Id" not in fields: fields.insert(0, "Id") fields_to_query = ", ".join(field for field in fields if field) query = f"SELECT {fields_to_query} FROM {sobject}" - if where_clause: - query += f" WHERE {where_clause}" - query += f" LIMIT {limit}" if limit else "" - query += f" OFFSET {offset}" if offset else "" + + if user_filter: + query += add_limit_offset_to_user_filter( + filter_clause=user_filter, limit_clause=limit, offset_clause=offset + ) + else: + # Get the WHERE clause from DEFAULT_DECLARATIONS if available + declaration = DEFAULT_DECLARATIONS.get(sobject) + if declaration: + query += f" WHERE {declaration.where}" + query += f" LIMIT {limit}" if limit else "" + query += f" OFFSET {offset}" if offset else "" return query, fields @@ -242,3 +260,43 @@ def calculate_levenshtein_distance(record1: list, record2: list): total_fields += 1 return total_distance / total_fields if total_fields > 0 else 0 + + +def add_limit_offset_to_user_filter( + filter_clause: str, + limit_clause: T.Union[float, None] = None, + offset_clause: T.Union[float, None] = None, +) -> str: + + # Extract existing LIMIT and OFFSET from filter_clause if present + existing_limit_match = re.search(r"LIMIT\s+(\d+)", filter_clause, re.IGNORECASE) + existing_offset_match = re.search(r"OFFSET\s+(\d+)", filter_clause, re.IGNORECASE) + + if existing_limit_match: + existing_limit = int(existing_limit_match.group(1)) + if limit_clause is not None: # Only apply limit_clause if it's provided + limit_clause = min(existing_limit, limit_clause) + else: + limit_clause = existing_limit + + if existing_offset_match: + existing_offset = int(existing_offset_match.group(1)) + if offset_clause is not None: + offset_clause = existing_offset + offset_clause + else: + offset_clause = existing_offset + + # Remove existing LIMIT and OFFSET from filter_clause, handling potential extra spaces + filter_clause = re.sub( + r"\s+OFFSET\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE + ).strip() + filter_clause = re.sub( + r"\s+LIMIT\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE + ).strip() + + if limit_clause is not None: + filter_clause += f" LIMIT {limit_clause}" + if offset_clause is not None: + filter_clause += f" OFFSET {offset_clause}" + + return f" {filter_clause}" diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 5f100aa88d..3f3fbaf0f3 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -3,7 +3,6 @@ import json import os import pathlib -import re import tempfile import time from abc import ABCMeta, abstractmethod @@ -453,7 +452,18 @@ def select_records(self, records): # Count total number of records to fetch using the copy total_num_records = sum(1 for _ in records_copy) - # Since OFFSET is not supported in bulk, we can run only over 1 api_batch_size + # Set LIMIT condition + if ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.ALL + ): + 
limit_clause = None + elif ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.MATCH + ): + limit_clause = total_num_records + # Generate and execute SOQL query # (not passing offset as it is not supported in Bulk) ( @@ -462,58 +472,17 @@ def select_records(self, records): ) = self.select_operation_executor.select_generate_query( sobject=self.sobject, fields=self.fields, - limit=self.api_options.get("batch_size", 500), + user_filter=self.selection_filter if self.selection_filter else None, + limit=limit_clause, offset=None, ) - if self.selection_filter: - # Generate user filter query if selection_filter is present (offset clause not supported) - user_query = generate_user_filter_query( - filter_clause=self.selection_filter, - sobject=self.sobject, - fields=["Id"], - limit_clause=self.api_options.get("batch_size", 500), - offset_clause=None, - ) - # Execute the user query using Bulk API - user_query_executor = get_query_operation( - sobject=self.sobject, - fields=["Id"], - api_options=self.api_options, - context=self, - query=user_query, - api=DataApi.BULK, - ) - user_query_executor.query() - user_query_records = user_query_executor.get_results() - - # Find intersection based on 'Id' - user_query_ids = ( - list(record[0] for record in user_query_records) - if user_query_records - else [] - ) # Execute the main select query using Bulk API select_query_records = self._execute_select_query( select_query=select_query, query_fields=query_fields ) - # If user_query_ids exist, filter select_query_records based on the intersection of Ids - if self.selection_filter: - # Create a dictionary to map IDs to their corresponding records - id_to_record_map = { - record[query_fields.index("Id")]: record - for record in select_query_records - } - # Extend query_records in the order of user_query_ids - query_records.extend( - record - for id in user_query_ids - if (record := id_to_record_map.get(id)) is not None - ) - else: - query_records.extend(select_query_records) - + query_records.extend(select_query_records) # Post-process the query results ( selected_records, @@ -812,100 +781,52 @@ def convert(rec, fields): self.results = [] query_records = [] - user_query_records = [] # Create a copy of the generator using tee records, records_copy = tee(records) # Count total number of records to fetch using the copy total_num_records = sum(1 for _ in records_copy) - # Set offset - offset = 0 - - # Define condition - def condition(retrieval_mode, offset, total_num_records): - if retrieval_mode == SelectRecordRetrievalMode.ALL: - return True - elif retrieval_mode == SelectRecordRetrievalMode.MATCH: - return offset < total_num_records - - # Process in batches - while condition( - self.select_operation_executor.retrieval_mode, offset, total_num_records - ): - # Generate the SOQL query based on the selection strategy - ( - select_query, - query_fields, - ) = self.select_operation_executor.select_generate_query( - sobject=self.sobject, - fields=self.fields, - limit=self.api_options.get("batch_size"), - offset=offset, - ) - # If user given selection filter present, create composite request - if self.selection_filter: - # Generate user query - user_query = generate_user_filter_query( - filter_clause=self.selection_filter, - sobject=self.sobject, - fields=["Id"], - limit_clause=self.api_options.get("batch_size"), - offset_clause=offset, - ) - # Execute composite query - ( - current_user_query_records, - current_query_records, - ) = self._execute_composite_query( - select_query=select_query, - 
user_query=user_query, - query_fields=query_fields, - ) - # Break if org has no more records - if ( - len(current_user_query_records) == 0 - and len(current_query_records) == 0 - ): - break + # Set LIMIT condition + if ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.ALL + ): + limit_clause = None + elif ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.MATCH + ): + limit_clause = total_num_records - # Extend to each - user_query_records.extend(current_user_query_records) - query_records.extend(current_query_records) + # Generate the SOQL query based on the selection strategy + ( + select_query, + query_fields, + ) = self.select_operation_executor.select_generate_query( + sobject=self.sobject, + fields=self.fields, + user_filter=self.selection_filter if self.selection_filter else None, + limit=limit_clause, + offset=None, + ) - else: - # Handle the case where self.selection_query is None (and hence user_query is also None) - response = self.sf.restful( - requests.utils.requote_uri(f"query/?q={select_query}"), method="GET" + # Handle the case where self.selection_query is None (and hence user_query is also None) + response = self.sf.restful( + requests.utils.requote_uri(f"query/?q={select_query}"), method="GET" + ) + query_records.extend( + list(convert(rec, query_fields) for rec in response["records"]) + ) + while True: + if not response["done"]: + response = self.sf.query_more( + response["nextRecordsUrl"], identifier_is_url=True ) - current_query_records = list( - convert(rec, query_fields) for rec in response["records"] + query_records.extend( + list(convert(rec, query_fields) for rec in response["records"]) ) - # Break if nothing is returned - if len(current_query_records) == 0: - break - # Extend the query records - query_records.extend(current_query_records) - - # Update offset - offset += self.api_options.get("batch_size") - - # Find intersection if filter given - if self.selection_filter: - # Find intersection based on 'Id' - user_query_ids = list(record[0] for record in user_query_records) - # Create a dictionary to map IDs to their corresponding records - id_to_record_map = { - record[query_fields.index("Id")]: record for record in query_records - } - - # Extend insersection_query_records in the order of user_query_ids - insersection_query_records = [ - record - for id in user_query_ids - if (record := id_to_record_map.get(id)) is not None - ] - else: - insersection_query_records = query_records + else: + break # Post-process the query results for this batch ( @@ -913,7 +834,7 @@ def condition(retrieval_mode, offset, total_num_records): error_message, ) = self.select_operation_executor.select_post_process( load_records=records, - query_records=insersection_query_records, + query_records=query_records, num_records=total_num_records, sobject=self.sobject, ) @@ -1103,53 +1024,3 @@ def get_dml_operation( selection_strategy=selection_strategy, selection_filter=selection_filter, ) - - -def generate_user_filter_query( - filter_clause: str, - sobject: str, - fields: list, - limit_clause: Union[float, None] = None, - offset_clause: Union[float, None] = None, -) -> str: - """ - Generates a SOQL query with the provided filter, object, fields, limit, and offset clauses. - Handles cases where the filter clause already contains LIMIT or OFFSET, and avoids multiple spaces. 
- """ - - # Extract existing LIMIT and OFFSET from filter_clause if present - existing_limit_match = re.search(r"LIMIT\s+(\d+)", filter_clause, re.IGNORECASE) - existing_offset_match = re.search(r"OFFSET\s+(\d+)", filter_clause, re.IGNORECASE) - - if existing_limit_match: - existing_limit = int(existing_limit_match.group(1)) - if limit_clause is not None: # Only apply limit_clause if it's provided - limit_clause = min(existing_limit, limit_clause) - else: - limit_clause = existing_limit - - if existing_offset_match: - existing_offset = int(existing_offset_match.group(1)) - if offset_clause is not None: - offset_clause = existing_offset + offset_clause - else: - offset_clause = existing_offset - - # Remove existing LIMIT and OFFSET from filter_clause, handling potential extra spaces - filter_clause = re.sub( - r"\s+OFFSET\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE - ).strip() - filter_clause = re.sub( - r"\s+LIMIT\s+\d+\s*", " ", filter_clause, flags=re.IGNORECASE - ).strip() - - # Construct the SOQL query - fields_str = ", ".join(fields) - query = f"SELECT {fields_str} FROM {sobject} {filter_clause}" - - if limit_clause is not None: - query += f" LIMIT {limit_clause}" - if offset_clause is not None: - query += f" OFFSET {offset_clause}" - - return query From 40097c17ea86b4229685f1218cc6c39603fc2b70 Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Tue, 5 Nov 2024 19:46:39 +0530 Subject: [PATCH 21/65] Add ANN algorithm for large number of records for similarity strategy --- .pre-commit-config.yaml | 6 +- cumulusci/tasks/bulkdata/select_utils.py | 203 ++++++++++++++++++++++- 2 files changed, 205 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 62af507949..b1a928eafd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/ambv/black - rev: 22.3.0 + rev: 24.10.0 hooks: - id: black - repo: https://github.com/pre-commit/pre-commit-hooks @@ -18,12 +18,12 @@ repos: - id: rst-linter exclude: "docs" - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort args: ["--profile", "black", "--filter-files"] - repo: https://github.com/pre-commit/mirrors-prettier - rev: v2.5.1 + rev: v4.0.0-alpha.8 hooks: - id: prettier - repo: local diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index daa993d045..741ed17056 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -2,6 +2,12 @@ import re import typing as T +import numpy as np +import pandas as pd +from annoy import AnnoyIndex +from sklearn.feature_extraction.text import HashingVectorizer +from sklearn.preprocessing import StandardScaler + from cumulusci.core.enums import StrEnum from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( DEFAULT_DECLARATIONS, @@ -159,7 +165,7 @@ def similarity_generate_query( def similarity_post_process( - load_records: list, query_records: list, sobject: str + load_records, query_records: list, sobject: str ) -> T.Tuple[T.List[dict], T.Union[str, None]]: """Processes the query results for the similarity selection strategy""" # Handle case where query returns 0 records @@ -167,6 +173,107 @@ def similarity_post_process( error_message = f"No records found for {sobject} in the target org." 
        return [], error_message
 
+    load_records = list(load_records)
+    load_record_count, query_record_count = len(load_records), len(query_records)
+
+    complexity_constant = load_record_count * query_record_count
+
+    print(complexity_constant)
+
+    closest_records = []
+
+    if complexity_constant < 1000:
+        closest_records = levenshtein_post_process(load_records, query_records)
+    else:
+        closest_records = annoy_post_process(load_records, query_records)
+
+    print(closest_records)
+
+    return closest_records
+
+
+def annoy_post_process(
+    load_records: list, query_records: list
+) -> T.Tuple[T.List[dict], T.Union[str, None]]:
+    """Processes the query results for the similarity selection strategy using Annoy algorithm for large number of records"""
+
+    query_records = replace_empty_strings_with_missing(query_records)
+    load_records = replace_empty_strings_with_missing(load_records)
+
+    print("Query records: ")
+    print(query_records)
+
+    print("Load records: ")
+    print(load_records)
+
+    print("\n\n\n\n")
+
+    hash_features = 100
+    num_trees = 10
+
+    query_record_ids = [record[0] for record in query_records]
+    query_record_data = [record[1:] for record in query_records]
+
+    record_to_id_map = {
+        tuple(query_record_data[i]): query_record_ids[i]
+        for i in range(len(query_records))
+    }
+
+    final_load_vectors, final_query_vectors = vectorize_records(
+        load_records, query_record_data, hash_features=hash_features
+    )
+
+    # Create Annoy index for nearest neighbor search
+    vector_dimension = final_query_vectors.shape[1]
+    annoy_index = AnnoyIndex(vector_dimension, "euclidean")
+
+    for i in range(len(final_query_vectors)):
+        annoy_index.add_item(i, final_query_vectors[i])
+
+    # Build the index
+    annoy_index.build(num_trees)
+
+    # Find nearest neighbors for each query vector
+    n_neighbors = 1
+
+    closest_records = []
+
+    for i, load_vector in enumerate(final_load_vectors):
+        # Get nearest neighbors' indices and distances
+        nearest_neighbors = annoy_index.get_nns_by_vector(
+            load_vector, n_neighbors, include_distances=True
+        )
+        neighbor_indices = nearest_neighbors[0]  # Indices of nearest neighbors
+        distances = nearest_neighbors[1]  # Distances to nearest neighbors
+
+        load_record = load_records[i]  # Get the query record for the current index
+        print(f"Load record {i + 1}: {load_record}\n")  # Print the query record
+
+        # Print the nearest neighbors for the current query
+        print(f"Nearest neighbors for load record {i + 1}:")
+
+        for j, neighbor_index in enumerate(neighbor_indices):
+            # Retrieve the corresponding record from the database
+            record = query_record_data[neighbor_index]
+            distance = distances[j]
+
+            # Print the record and its distance
+            print(f"  Neighbor {j + 1}: {record}, Distance: {distance:.6f}")
+            closest_record_id = record_to_id_map[tuple(record)]
+            print("Record id:" + closest_record_id)
+            closest_records.append(
+                {"id": closest_record_id, "success": True, "created": False}
+            )
+
+        print("\n")  # Add a newline for better readability between query results
+
+    return closest_records, None
+
+
+def levenshtein_post_process(
+    load_records: list, query_records: list
+) -> T.Tuple[T.List[dict], T.Union[str, None]]:
+    """Processes the query results for the similarity selection strategy using Levenshtein algorithm for small number of records"""
     closest_records = []
 
     for record in load_records:
@@ -300,3 +407,97 @@ def add_limit_offset_to_user_filter(
         filter_clause += f" OFFSET {offset_clause}"
 
     return f" {filter_clause}"
+
+
+def determine_field_types(df):
+    numerical_features = []
+    boolean_features = []
+ categorical_features = [] + + for col in df.columns: + # Check if the column can be converted to numeric + try: + # Attempt to convert to numeric + df[col] = pd.to_numeric(df[col], errors="raise") + numerical_features.append(col) + except ValueError: + # Check for boolean values + if df[col].str.lower().isin(["true", "false"]).all(): + # Map to actual boolean values + df[col] = df[col].str.lower().map({"true": True, "false": False}) + boolean_features.append(col) + else: + categorical_features.append(col) + + return numerical_features, boolean_features, categorical_features + + +def vectorize_records(db_records, query_records, hash_features): + # Convert database records and query records to DataFrames + df_db = pd.DataFrame(db_records) + df_query = pd.DataFrame(query_records) + + # Dynamically determine field types + numerical_features, boolean_features, categorical_features = determine_field_types( + df_db + ) + + # Fit StandardScaler on the numerical features of the database records + scaler = StandardScaler() + if numerical_features: + df_db[numerical_features] = scaler.fit_transform(df_db[numerical_features]) + df_query[numerical_features] = scaler.transform(df_query[numerical_features]) + + # Use HashingVectorizer to transform the categorical features + hashing_vectorizer = HashingVectorizer( + n_features=hash_features, alternate_sign=False + ) + + # For db_records + hashed_categorical_data_db = [] + for col in categorical_features: + hashed_db = hashing_vectorizer.fit_transform(df_db[col]).toarray() + hashed_categorical_data_db.append(hashed_db) + + # For query_records + hashed_categorical_data_query = [] + for col in categorical_features: + hashed_query = hashing_vectorizer.transform(df_query[col]).toarray() + hashed_categorical_data_query.append(hashed_query) + + # Combine all feature types into a single vector for the database records + db_vectors = [] + if numerical_features: + db_vectors.append(df_db[numerical_features].values) + if boolean_features: + db_vectors.append( + df_db[boolean_features].astype(int).values + ) # Convert boolean to int + if hashed_categorical_data_db: + db_vectors.append(np.hstack(hashed_categorical_data_db)) + + # Concatenate database vectors + final_db_vectors = np.hstack(db_vectors) + + # Combine all feature types into a single vector for the query records + query_vectors = [] + if numerical_features: + query_vectors.append(df_query[numerical_features].values) + if boolean_features: + query_vectors.append( + df_query[boolean_features].astype(int).values + ) # Convert boolean to int + if hashed_categorical_data_query: + query_vectors.append(np.hstack(hashed_categorical_data_query)) + + # Concatenate query vectors + final_query_vectors = np.hstack(query_vectors) + + return final_db_vectors, final_query_vectors + + +def replace_empty_strings_with_missing(records): + return [ + [(field if field != "" else "missing") for field in record] + for record in records + ] From 83d45db946a512bbaabd094e9199d2df9d870bd0 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Thu, 7 Nov 2024 11:23:37 +0530 Subject: [PATCH 22/65] Reference parent level record during similarity matching --- cumulusci/tasks/bulkdata/load.py | 117 +++++++++++++- cumulusci/tasks/bulkdata/mapping_parser.py | 18 ++- .../tasks/bulkdata/query_transformers.py | 60 ++++++++ cumulusci/tasks/bulkdata/select_utils.py | 87 ++++++----- cumulusci/tasks/bulkdata/step.py | 143 ++++++++++++++---- 5 files changed, 350 insertions(+), 75 deletions(-) diff --git a/cumulusci/tasks/bulkdata/load.py 
b/cumulusci/tasks/bulkdata/load.py index f83199050a..d4050c0aca 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -27,6 +27,7 @@ AddMappingFiltersToQuery, AddPersonAccountsToQuery, AddRecordTypesToQuery, + DynamicLookupQueryExtender, ) from cumulusci.tasks.bulkdata.step import ( DEFAULT_BULK_BATCH_SIZE, @@ -314,6 +315,7 @@ def configure_step(self, mapping): bulk_mode = mapping.bulk_mode or self.bulk_mode or "Parallel" api_options = {"batch_size": mapping.batch_size, "bulk_mode": bulk_mode} num_records_in_target = None + content_type = None fields = mapping.get_load_field_list() @@ -343,6 +345,8 @@ def configure_step(self, mapping): api_options["update_key"] = mapping.update_key[0] action = DataOperationType.UPSERT elif mapping.action == DataOperationType.SELECT: + # Set content type to json + content_type = "JSON" # Bulk process expects DataOpertionType to be QUERY action = DataOperationType.QUERY # Determine number of records in the target org @@ -354,6 +358,97 @@ def configure_step(self, mapping): for entry in record_count_response["sObjects"] } num_records_in_target = sobject_map.get(mapping.sf_object, None) + + # Check for similarity selection strategy and modify fields accordingly + if mapping.selection_strategy == "similarity": + # Describe the object to determine polymorphic lookups + describe_result = self.sf.restful( + f"sobjects/{mapping.sf_object}/describe" + ) + polymorphic_fields = { + field["name"]: field + for field in describe_result["fields"] + if field["type"] == "reference" + } + + # Loop through each lookup to get the corresponding fields + for name, lookup in mapping.lookups.items(): + if name in fields: + # Get the index of the lookup field before removing it + insert_index = fields.index(name) + # Remove the lookup field from fields + fields.remove(name) + + # Check if this lookup field is polymorphic + if ( + name in polymorphic_fields + and len(polymorphic_fields[name]["referenceTo"]) > 1 + ): + # Convert to list if string + if not isinstance(lookup.table, list): + lookup.table = [lookup.table] + # Polymorphic field handling + polymorphic_references = lookup.table + relationship_name = polymorphic_fields[name][ + "relationshipName" + ] + + # Loop through each polymorphic type (e.g., Contact, Lead) + for ref_type in polymorphic_references: + # Find the mapping step for this polymorphic type + lookup_mapping_step = next( + ( + step + for step in self.mapping.values() + if step.sf_object == ref_type + ), + None, + ) + + if lookup_mapping_step: + lookup_fields = ( + lookup_mapping_step.get_load_field_list() + ) + # Insert fields in the format {relationship_name}.{ref_type}.{lookup_field} + for field in lookup_fields: + fields.insert( + insert_index, + f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}", + ) + insert_index += 1 + + else: + # Non-polymorphic field handling + lookup_table = lookup.table + + if isinstance(lookup_table, list): + lookup_table = lookup_table[0] + + # Get the mapping step for the non-polymorphic reference + lookup_mapping_step = next( + ( + step + for step in self.mapping.values() + if step.sf_object == lookup_table + ), + None, + ) + + if lookup_mapping_step: + relationship_name = polymorphic_fields[name][ + "relationshipName" + ] + lookup_fields = ( + lookup_mapping_step.get_load_field_list() + ) + + # Insert the new fields at the same position as the removed lookup field + for field in lookup_fields: + fields.insert( + insert_index, f"{relationship_name}.{field}" + ) + insert_index += 1 + 
else: action = mapping.action @@ -376,6 +471,7 @@ def configure_step(self, mapping): volume=volume, selection_strategy=mapping.selection_strategy, selection_filter=mapping.selection_filter, + content_type=content_type, ) return step, query @@ -406,6 +502,9 @@ def _stream_queried_data(self, mapping, local_ids, query): pkey = row[0] row = list(row[1:]) + statics + # Replace None values in row with empty strings + row = [value if value is not None else "" for value in row] + if mapping.anchor_date and (date_context[0] or date_context[1]): row = adjust_relative_dates( mapping, date_context, row, DataOperationType.INSERT @@ -475,9 +574,21 @@ def _query_db(self, mapping): AddMappingFiltersToQuery, AddUpsertsToQuery, ] - transformers = [ - AddLookupsToQuery(mapping, self.metadata, model, self._old_format) - ] + transformers = [] + if ( + mapping.action == DataOperationType.SELECT + and mapping.selection_strategy == "similarity" + ): + transformers.append( + DynamicLookupQueryExtender( + mapping, self.mapping, self.metadata, model, self._old_format + ) + ) + else: + transformers.append( + AddLookupsToQuery(mapping, self.metadata, model, self._old_format) + ) + transformers.extend([cls(mapping, self.metadata, model) for cls in classes]) if mapping.sf_object == "Contact" and self._can_load_person_accounts(mapping): diff --git a/cumulusci/tasks/bulkdata/mapping_parser.py b/cumulusci/tasks/bulkdata/mapping_parser.py index e812ca7d16..c9009f82fc 100644 --- a/cumulusci/tasks/bulkdata/mapping_parser.py +++ b/cumulusci/tasks/bulkdata/mapping_parser.py @@ -103,15 +103,15 @@ class MappingStep(CCIDictModel): batch_size: int = None oid_as_pk: bool = False # this one should be discussed and probably deprecated record_type: Optional[str] = None # should be discussed and probably deprecated - bulk_mode: Optional[ - Literal["Serial", "Parallel"] - ] = None # default should come from task options + bulk_mode: Optional[Literal["Serial", "Parallel"]] = ( + None # default should come from task options + ) anchor_date: Optional[Union[str, date]] = None soql_filter: Optional[str] = None # soql_filter property selection_strategy: SelectStrategy = SelectStrategy.STANDARD # selection strategy - selection_filter: Optional[ - str - ] = None # filter to be added at the end of select query + selection_filter: Optional[str] = ( + None # filter to be added at the end of select query + ) update_key: T.Union[str, T.Tuple[str, ...]] = () # only for upserts @validator("bulk_mode", "api", "action", "selection_strategy", pre=True) @@ -678,7 +678,9 @@ def _infer_and_validate_lookups(mapping: Dict, sf: Salesforce): if len(target_objects) == 1: # This is a non-polymorphic lookup. target_index = list(sf_objects.values()).index(target_objects[0]) - if target_index > idx or target_index == idx: + if ( + target_index > idx or target_index == idx + ) and m.action != DataOperationType.SELECT: # This is a non-polymorphic after step. lookup.after = list(mapping.keys())[idx] else: @@ -730,7 +732,7 @@ def validate_and_inject_mapping( if drop_missing: # Drop any steps with sObjects that are not present. 
-        for (include, step_name) in zip(should_continue, list(mapping.keys())):
+        for include, step_name in zip(should_continue, list(mapping.keys())):
             if not include:
                 del mapping[step_name]
 
diff --git a/cumulusci/tasks/bulkdata/query_transformers.py b/cumulusci/tasks/bulkdata/query_transformers.py
index aef23f5dc3..eda7a2cabe 100644
--- a/cumulusci/tasks/bulkdata/query_transformers.py
+++ b/cumulusci/tasks/bulkdata/query_transformers.py
@@ -86,6 +86,66 @@ def join_for_lookup(lookup):
 
         return [join_for_lookup(lookup) for lookup in self.lookups]
 
 
+class DynamicLookupQueryExtender(LoadQueryExtender):
+    """Dynamically adds columns and joins for all fields in lookup tables, handling polymorphic lookups"""
+
+    def __init__(
+        self, mapping, all_mappings, metadata, model, _old_format: bool
+    ) -> None:
+        super().__init__(mapping, metadata, model)
+        self._old_format = _old_format
+        self.all_mappings = all_mappings
+        self.lookups = [
+            lookup for lookup in self.mapping.lookups.values() if not lookup.after
+        ]
+
+    @cached_property
+    def columns_to_add(self):
+        """Add all relevant fields from lookup tables directly without CASE, with support for polymorphic lookups."""
+        columns = []
+        for lookup in self.lookups:
+            tables = lookup.table if isinstance(lookup.table, list) else [lookup.table]
+            lookup.aliased_table = [
+                aliased(self.metadata.tables[table]) for table in tables
+            ]
+
+            for aliased_table, table_name in zip(lookup.aliased_table, tables):
+                # Find the mapping step for this polymorphic type
+                lookup_mapping_step = next(
+                    (
+                        step
+                        for step in self.all_mappings.values()
+                        if step.table == table_name
+                    ),
+                    None,
+                )
+                if lookup_mapping_step:
+                    load_fields = lookup_mapping_step.get_load_field_list()
+                    for field in load_fields:
+                        matching_column = next(
+                            (col for col in aliased_table.columns if col.name == field)
+                        )
+                        columns.append(
+                            matching_column.label(f"{aliased_table.name}_{field}")
+                        )
+        return columns
+
+    @cached_property
+    def outerjoins_to_add(self):
+        """Add outer joins for each lookup table directly, including handling for polymorphic lookups."""
+
+        def join_for_lookup(lookup, aliased_table):
+            key_field = lookup.get_lookup_key_field(self.model)
+            value_column = getattr(self.model, key_field)
+            return (aliased_table, aliased_table.columns.id == value_column)
+
+        joins = []
+        for lookup in self.lookups:
+            for aliased_table in lookup.aliased_table:
+                joins.append(join_for_lookup(lookup, aliased_table))
+        return joins
+
+
 class AddRecordTypesToQuery(LoadQueryExtender):
     """Adds columns, joins and filters relating to recordtypes"""
 
diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py
index 741ed17056..d1092504f4 100644
--- a/cumulusci/tasks/bulkdata/select_utils.py
+++ b/cumulusci/tasks/bulkdata/select_utils.py
@@ -142,14 +142,54 @@ def similarity_generate_query(
     limit: T.Union[int, None],
     offset: T.Union[int, None],
 ) -> T.Tuple[str, T.List[str]]:
-    """Generates the SOQL query for the similarity selection strategy"""
-    # Construct the query with the WHERE clause (if it exists)
-    if "Id" not in fields:
-        fields.insert(0, "Id")
-    fields_to_query = ", ".join(field for field in fields if field)
-
+    """Generates the SOQL query for the similarity selection strategy, with support for TYPEOF on polymorphic fields."""
+
+    # Pre-process the new fields format to create a nested dict structure for TYPEOF clauses
+    nested_fields = {}
+    regular_fields = []
+
+    for field in fields:
+        components = field.split(".")
+        if len(components) >= 3:
+            # Handle polymorphic fields (format: {relationship_name}.{ref_obj}.{ref_field})
+            relationship, ref_obj, ref_field = (
+                components[0],
+                components[1],
+                components[2],
+            )
+            if relationship not in nested_fields:
+                nested_fields[relationship] = {}
+            if ref_obj not in nested_fields[relationship]:
+                nested_fields[relationship][ref_obj] = []
+            nested_fields[relationship][ref_obj].append(ref_field)
+        else:
+            # Handle regular fields (format: {field})
+            regular_fields.append(field)
+
+    # Construct the query fields
+    query_fields = []
+
+    # Build TYPEOF clauses for polymorphic fields
+    for relationship, references in nested_fields.items():
+        type_clauses = []
+        for ref_obj, ref_fields in references.items():
+            fields_clause = ", ".join(ref_fields)
+            type_clauses.append(f"WHEN {ref_obj} THEN {fields_clause}")
+        type_clause = f"TYPEOF {relationship} {' '.join(type_clauses)} END"
+        query_fields.append(type_clause)
+
+    # Add regular fields to the query
+    query_fields.extend(regular_fields)
+
+    # Ensure "Id" is included in the fields list for identification
+    if "Id" not in query_fields:
+        query_fields.insert(0, "Id")
+
+    # Build the main SOQL query
+    fields_to_query = ", ".join(query_fields)
     query = f"SELECT {fields_to_query} FROM {sobject}"
 
+    # Add the user-defined filter clause or default clause
     if user_filter:
         query += add_limit_offset_to_user_filter(
             filter_clause=user_filter, limit_clause=limit, offset_clause=offset
@@ -161,7 +201,12 @@ def similarity_generate_query(
             query += f" WHERE {declaration.where}"
         query += f" LIMIT {limit}" if limit else ""
         query += f" OFFSET {offset}" if offset else ""
-    return query, fields
+
+    # Return the original input fields with "Id" added if needed
+    if "Id" not in fields:
+        fields.insert(0, "Id")
+
+    return query, fields  # Return the original input fields with "Id" added
 
 
 def similarity_post_process(
@@ -178,8 +223,6 @@ def similarity_post_process(
     complexity_constant = load_record_count * query_record_count
 
-    print(complexity_constant)
-
     closest_records = []
 
     if complexity_constant < 1000:
         closest_records = levenshtein_post_process(load_records, query_records)
     else:
         closest_records = annoy_post_process(load_records, query_records)
 
-    print(closest_records)
-
     return closest_records
 
 
@@ -200,14 +241,6 @@ def annoy_post_process(
     query_records = replace_empty_strings_with_missing(query_records)
     load_records = replace_empty_strings_with_missing(load_records)
 
-    print("Query records: ")
-    print(query_records)
-
-    print("Load records: ")
-    print(load_records)
-
-    print("\n\n\n\n")
-
     hash_features = 100
     num_trees = 10
 
@@ -244,29 +277,15 @@ def annoy_post_process(
             load_vector, n_neighbors, include_distances=True
         )
         neighbor_indices = nearest_neighbors[0]  # Indices of nearest neighbors
-        distances = nearest_neighbors[1]  # Distances to nearest neighbors
 
-        load_record = load_records[i]  # Get the query record for the current index
-        print(f"Load record {i + 1}: {load_record}\n")  # Print the query record
-
-        # Print the nearest neighbors for the current query
-        print(f"Nearest neighbors for load record {i + 1}:")
-
-        for j, neighbor_index in enumerate(neighbor_indices):
+        for neighbor_index in neighbor_indices:
             # Retrieve the corresponding record from the database
             record = query_record_data[neighbor_index]
-            distance = distances[j]
-
-            # Print the record and its distance
-            print(f"  Neighbor {j + 1}: {record}, Distance: {distance:.6f}")
             closest_record_id = record_to_id_map[tuple(record)]
-            print("Record id:" + closest_record_id)
             closest_records.append(
                 {"id": closest_record_id, "success": True, "created": False}
) - print("\n") # Add a newline for better readability between query results - return closest_records, None diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 3f3fbaf0f3..b664b48ffc 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -352,6 +352,7 @@ def __init__( fields, selection_strategy=SelectStrategy.STANDARD, selection_filter=None, + content_type=None, ): super().__init__( sobject=sobject, @@ -369,12 +370,13 @@ def __init__( self.select_operation_executor = SelectOperationExecutor(selection_strategy) self.selection_filter = selection_filter + self.content_type = content_type if content_type else "CSV" def start(self): self.job_id = self.bulk.create_job( self.sobject, self.operation.value, - contentType="CSV", + contentType=self.content_type, concurrency=self.api_options.get("bulk_mode", "Parallel"), external_id_name=self.api_options.get("update_key"), ) @@ -498,31 +500,39 @@ def select_records(self, records): # Update job result based on selection outcome self.job_result = DataOperationJobResult( - status=DataOperationStatus.SUCCESS - if len(self.select_results) - else DataOperationStatus.JOB_FAILURE, + status=( + DataOperationStatus.SUCCESS + if len(self.select_results) + else DataOperationStatus.JOB_FAILURE + ), job_errors=[error_message] if error_message else [], records_processed=len(self.select_results), total_row_errors=0, ) def _execute_select_query(self, select_query: str, query_fields: List[str]): - """Executes the select Bulk API query and retrieves the results.""" + """Executes the select Bulk API query, retrieves results in JSON, and converts to CSV format if needed.""" self.batch_id = self.bulk.query(self.job_id, select_query) - self._wait_for_job(self.job_id) + self.bulk.wait_for_batch(self.job_id, self.batch_id) result_ids = self.bulk.get_query_batch_result_ids( self.batch_id, job_id=self.job_id ) select_query_records = [] + for result_id in result_ids: - uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}" + # Modify URI to request JSON format + uri = f"{self.bulk.endpoint}/job/{self.job_id}/batch/{self.batch_id}/result/{result_id}?format=json" + # Download JSON data with download_file(uri, self.bulk) as f: - reader = csv.reader(f) - self.headers = next(reader) - if "Records not found for this query" in self.headers: - break - for row in reader: - select_query_records.append(row[: len(query_fields)]) + data = json.load(f) + # Get headers from fields, expanding nested structures for TYPEOF results + self.headers = query_fields + + # Convert each record to a flat row + for record in data: + flat_record = flatten_record(record, self.headers) + select_query_records.append(flat_record) + return select_query_records def _batch(self, records, n, char_limit=10000000): @@ -641,6 +651,7 @@ def __init__( fields, selection_strategy=SelectStrategy.STANDARD, selection_filter=None, + content_type=None, ): super().__init__( sobject=sobject, @@ -655,7 +666,9 @@ def __init__( field["name"]: field for field in getattr(context.sf, sobject).describe()["fields"] } - self.boolean_fields = [f for f in fields if describe[f]["type"] == "boolean"] + self.boolean_fields = [ + f for f in fields if "." 
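Editor's note: requesting `?format=json` avoids hand-parsing the CSV that the Bulk API returns by default and keeps nested lookup objects intact until `flatten_record` (defined later in this patch) turns them into flat rows. A simplified sketch of the two-component case, under the same conventions:

```python
# Sketch: flatten one Bulk API JSON record into a CSV-style row for headers
# like "Id" or "Account.Name" (nested lookups come back as dicts).
def flatten_simple(record, headers):
    row = []
    for field in headers:
        parts = field.split(".")
        if len(parts) == 2:
            parent = record.get(parts[0]) or {}
            value = parent.get(parts[1], "")
        else:
            value = record.get(field, "")
        row.append("" if value is None else str(value))
    return row


rec = {"Id": "003A", "Name": "Ada", "Account": {"Name": "Acme"}}
assert flatten_simple(rec, ["Id", "Name", "Account.Name"]) == ["003A", "Ada", "Acme"]
```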
not in f and describe[f]["type"] == "boolean" + ] self.api_options = api_options.copy() self.api_options["batch_size"] = ( self.api_options.get("batch_size") or DEFAULT_REST_BATCH_SIZE @@ -666,6 +679,7 @@ def __init__( self.select_operation_executor = SelectOperationExecutor(selection_strategy) self.selection_filter = selection_filter + self.content_type = content_type def _record_to_json(self, rec): result = dict(zip(self.fields, rec)) @@ -764,9 +778,11 @@ def load_records(self, records): row_errors = len([res for res in self.results if not res["success"]]) self.job_result = DataOperationJobResult( - DataOperationStatus.SUCCESS - if not row_errors - else DataOperationStatus.ROW_FAILURE, + ( + DataOperationStatus.SUCCESS + if not row_errors + else DataOperationStatus.ROW_FAILURE + ), [], len(self.results), row_errors, @@ -775,10 +791,6 @@ def load_records(self, records): def select_records(self, records): """Executes a SOQL query to select records and adds them to results""" - def convert(rec, fields): - """Helper function to convert record values to strings, handling None values""" - return [str(rec[f]) if rec[f] is not None else "" for f in fields] - self.results = [] query_records = [] # Create a copy of the generator using tee @@ -814,17 +826,18 @@ def convert(rec, fields): response = self.sf.restful( requests.utils.requote_uri(f"query/?q={select_query}"), method="GET" ) - query_records.extend( - list(convert(rec, query_fields) for rec in response["records"]) - ) + # Convert each record to a flat row + for record in response["records"]: + flat_record = flatten_record(record, query_fields) + query_records.append(flat_record) while True: if not response["done"]: response = self.sf.query_more( response["nextRecordsUrl"], identifier_is_url=True ) - query_records.extend( - list(convert(rec, query_fields) for rec in response["records"]) - ) + for record in response["records"]: + flat_record = flatten_record(record, query_fields) + query_records.append(flat_record) else: break @@ -844,9 +857,11 @@ def convert(rec, fields): # Update the job result based on the overall selection outcome self.job_result = DataOperationJobResult( - status=DataOperationStatus.SUCCESS - if len(self.results) # Check the overall results length - else DataOperationStatus.JOB_FAILURE, + status=( + DataOperationStatus.SUCCESS + if len(self.results) # Check the overall results length + else DataOperationStatus.JOB_FAILURE + ), job_errors=[error_message] if error_message else [], records_processed=len(self.results), total_row_errors=0, @@ -988,6 +1003,7 @@ def get_dml_operation( api: Optional[DataApi] = DataApi.SMART, selection_strategy: SelectStrategy = SelectStrategy.STANDARD, selection_filter: Union[str, None] = None, + content_type: Union[str, None] = None, ) -> BaseDmlOperation: """Create an appropriate DmlOperation instance for the given parameters, selecting between REST and Bulk APIs based upon volume (Bulk used at volumes over 2000 records, @@ -1023,4 +1039,71 @@ def get_dml_operation( fields=fields, selection_strategy=selection_strategy, selection_filter=selection_filter, + content_type=content_type, ) + + +def extract_flattened_headers(query_fields): + """Extract headers from query fields, including handling of TYPEOF fields.""" + headers = [] + + for field in query_fields: + if isinstance(field, dict): + # Handle TYPEOF / polymorphic fields + for lookup, references in field.items(): + # Assuming each reference is a list of dictionaries + for ref_type in references: + for ref_obj, ref_fields in 
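Editor's note: the REST path above pages through results with `query_more`. A compact sketch of that drain loop, assuming an authenticated `simple_salesforce` client (`sf`):

```python
# Sketch: drain a paginated SOQL query via simple_salesforce, mirroring the
# query / query_more loop in select_records above.
def fetch_all_records(sf, soql):
    response = sf.query(soql)
    records = list(response["records"])
    while not response["done"]:
        response = sf.query_more(response["nextRecordsUrl"], identifier_is_url=True)
        records.extend(response["records"])
    return records
```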
ref_type.items(): + for nested_field in ref_fields: + headers.append( + f"{lookup}.{ref_obj}.{nested_field}" + ) # Flatten the structure + else: + # Regular fields + headers.append(field) + + return headers + + +def flatten_record(record, headers): + """Flatten each record to match headers, handling nested fields.""" + flat_record = [] + + for field in headers: + components = field.split(".") + value = "" + + # Handle lookup fields with two or three components + if len(components) >= 2: + lookup_field = components[0] + lookup = record.get(lookup_field, None) + + # Check if lookup field exists in the record + if lookup is None: + value = "" + else: + if len(components) == 2: + # Handle fields with two components: {lookup}.{ref_field} + ref_field = components[1] + value = lookup.get(ref_field, "") + elif len(components) == 3: + # Handle fields with three components: {lookup}.{ref_obj}.{ref_field} + ref_obj, ref_field = components[1], components[2] + # Check if the type matches the specified ref_obj + if lookup.get("attributes", {}).get("type") == ref_obj: + value = lookup.get(ref_field, "") + else: + value = "" + + else: + # Regular fields or non-polymorphic fields + value = record.get(field, "") + + # Set None values to empty string + if value is None: + value = "" + + # Append the resolved value to the flattened record + flat_record.append(value) + + return flat_record From a64e43866e65f9e6f8d84ad6c972ed858d8a0310 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Thu, 7 Nov 2024 12:50:30 +0530 Subject: [PATCH 23/65] Fix for test import failure --- cumulusci/tasks/bulkdata/step.py | 2 + cumulusci/tasks/bulkdata/tests/test_step.py | 190 ++++++++++---------- 2 files changed, 96 insertions(+), 96 deletions(-) diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index b664b48ffc..cb86bda6fa 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -1102,6 +1102,8 @@ def flatten_record(record, headers): # Set None values to empty string if value is None: value = "" + elif not isinstance(value, str): + value = str(value) # Append the resolved value to the flattened record flat_record.append(value) diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index c182a92996..da13a9a8eb 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -20,7 +20,6 @@ RestApiDmlOperation, RestApiQueryOperation, download_file, - generate_user_filter_query, get_dml_operation, get_query_operation, ) @@ -2491,98 +2490,97 @@ def test_cleanup_date_strings__upsert_update(self, operation): import pytest - -def test_generate_user_filter_query_basic(): - """Tests basic query generation without existing LIMIT or OFFSET.""" - filter_clause = "WHERE Name = 'John'" - sobject = "Account" - fields = ["Id", "Name"] - limit_clause = 10 - offset_clause = 5 - - expected_query = ( - "SELECT Id, Name FROM Account WHERE Name = 'John' LIMIT 10 OFFSET 5" - ) - assert ( - generate_user_filter_query( - filter_clause, sobject, fields, limit_clause, offset_clause - ) - == expected_query - ) - - -def test_generate_user_filter_query_existing_limit(): - """Tests handling of existing LIMIT in the filter clause.""" - filter_clause = "WHERE Name = 'John' LIMIT 20" - sobject = "Contact" - fields = ["Id", "FirstName"] - limit_clause = 5 # Should override the existing LIMIT - offset_clause = None - - expected_query = "SELECT Id, FirstName FROM Contact WHERE Name = 'John' LIMIT 5" - assert 
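Editor's note: `extract_flattened_headers` walks a mixed list of plain field names and TYPEOF descriptors; a self-contained sketch of the same expansion (PATCH 23 above additionally coerces non-string values to `str` so numeric and boolean fields survive the flat rows):

```python
# Sketch: expand query fields into flat headers, matching the logic of
# extract_flattened_headers; dict entries describe polymorphic lookups.
def expand_headers(query_fields):
    headers = []
    for field in query_fields:
        if isinstance(field, dict):
            for lookup, references in field.items():
                for ref in references:
                    for ref_obj, ref_fields in ref.items():
                        headers.extend(f"{lookup}.{ref_obj}.{f}" for f in ref_fields)
        else:
            headers.append(field)
    return headers


assert expand_headers(
    ["Id", {"Who": [{"Contact": ["LastName"]}, {"Lead": ["Company"]}]}]
) == ["Id", "Who.Contact.LastName", "Who.Lead.Company"]
```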
( - generate_user_filter_query( - filter_clause, sobject, fields, limit_clause, offset_clause - ) - == expected_query - ) - - -def test_generate_user_filter_query_existing_offset(): - """Tests handling of existing OFFSET in the filter clause.""" - filter_clause = "WHERE Name = 'John' OFFSET 15" - sobject = "Opportunity" - fields = ["Id", "Name"] - limit_clause = None - offset_clause = 10 # Should add to the existing OFFSET - - expected_query = "SELECT Id, Name FROM Opportunity WHERE Name = 'John' OFFSET 25" - assert ( - generate_user_filter_query( - filter_clause, sobject, fields, limit_clause, offset_clause - ) - == expected_query - ) - - -def test_generate_user_filter_query_no_limit_or_offset(): - """Tests when no limit or offset is provided or present in the filter.""" - filter_clause = "WHERE Name = 'John' LIMIT 5 OFFSET 20" - sobject = "Lead" - fields = ["Id", "Name", "Email"] - limit_clause = None - offset_clause = None - - expected_query = ( - "SELECT Id, Name, Email FROM Lead WHERE Name = 'John' LIMIT 5 OFFSET 20" - ) - print( - generate_user_filter_query( - filter_clause, sobject, fields, limit_clause, offset_clause - ) - ) - assert ( - generate_user_filter_query( - filter_clause, sobject, fields, limit_clause, offset_clause - ) - == expected_query - ) - - -def test_generate_user_filter_query_case_insensitivity(): - """Tests case-insensitivity for LIMIT and OFFSET.""" - filter_clause = "where name = 'John' offset 5 limit 20" - sobject = "Task" - fields = ["Id", "Subject"] - limit_clause = 15 - offset_clause = 20 - - expected_query = ( - "SELECT Id, Subject FROM Task where name = 'John' LIMIT 15 OFFSET 25" - ) - assert ( - generate_user_filter_query( - filter_clause, sobject, fields, limit_clause, offset_clause - ) - == expected_query - ) +# def test_generate_user_filter_query_basic(): +# """Tests basic query generation without existing LIMIT or OFFSET.""" +# filter_clause = "WHERE Name = 'John'" +# sobject = "Account" +# fields = ["Id", "Name"] +# limit_clause = 10 +# offset_clause = 5 + +# expected_query = ( +# "SELECT Id, Name FROM Account WHERE Name = 'John' LIMIT 10 OFFSET 5" +# ) +# assert ( +# generate_user_filter_query( +# filter_clause, sobject, fields, limit_clause, offset_clause +# ) +# == expected_query +# ) + + +# def test_generate_user_filter_query_existing_limit(): +# """Tests handling of existing LIMIT in the filter clause.""" +# filter_clause = "WHERE Name = 'John' LIMIT 20" +# sobject = "Contact" +# fields = ["Id", "FirstName"] +# limit_clause = 5 # Should override the existing LIMIT +# offset_clause = None + +# expected_query = "SELECT Id, FirstName FROM Contact WHERE Name = 'John' LIMIT 5" +# assert ( +# generate_user_filter_query( +# filter_clause, sobject, fields, limit_clause, offset_clause +# ) +# == expected_query +# ) + + +# def test_generate_user_filter_query_existing_offset(): +# """Tests handling of existing OFFSET in the filter clause.""" +# filter_clause = "WHERE Name = 'John' OFFSET 15" +# sobject = "Opportunity" +# fields = ["Id", "Name"] +# limit_clause = None +# offset_clause = 10 # Should add to the existing OFFSET + +# expected_query = "SELECT Id, Name FROM Opportunity WHERE Name = 'John' OFFSET 25" +# assert ( +# generate_user_filter_query( +# filter_clause, sobject, fields, limit_clause, offset_clause +# ) +# == expected_query +# ) + + +# def test_generate_user_filter_query_no_limit_or_offset(): +# """Tests when no limit or offset is provided or present in the filter.""" +# filter_clause = "WHERE Name = 'John' LIMIT 5 OFFSET 20" +# sobject = 
"Lead" +# fields = ["Id", "Name", "Email"] +# limit_clause = None +# offset_clause = None + +# expected_query = ( +# "SELECT Id, Name, Email FROM Lead WHERE Name = 'John' LIMIT 5 OFFSET 20" +# ) +# print( +# generate_user_filter_query( +# filter_clause, sobject, fields, limit_clause, offset_clause +# ) +# ) +# assert ( +# generate_user_filter_query( +# filter_clause, sobject, fields, limit_clause, offset_clause +# ) +# == expected_query +# ) + + +# def test_generate_user_filter_query_case_insensitivity(): +# """Tests case-insensitivity for LIMIT and OFFSET.""" +# filter_clause = "where name = 'John' offset 5 limit 20" +# sobject = "Task" +# fields = ["Id", "Subject"] +# limit_clause = 15 +# offset_clause = 20 + +# expected_query = ( +# "SELECT Id, Subject FROM Task where name = 'John' LIMIT 15 OFFSET 25" +# ) +# assert ( +# generate_user_filter_query( +# filter_clause, sobject, fields, limit_clause, offset_clause +# ) +# == expected_query +# ) From f86f94da5c102a95c3ba235207285dd70c287a98 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Thu, 7 Nov 2024 14:37:43 +0530 Subject: [PATCH 24/65] Fix for no records if parent sobject not found --- cumulusci/tasks/bulkdata/select_utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index d1092504f4..0e842a09e1 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -175,7 +175,7 @@ def similarity_generate_query( for ref_obj, ref_fields in references.items(): fields_clause = ", ".join(ref_fields) type_clauses.append(f"WHEN {ref_obj} THEN {fields_clause}") - type_clause = f"TYPEOF {relationship} {' '.join(type_clauses)} END" + type_clause = f"TYPEOF {relationship} {' '.join(type_clauses)} ELSE Id END" query_fields.append(type_clause) # Add regular fields to the query @@ -206,7 +206,7 @@ def similarity_generate_query( if "Id" not in fields: fields.insert(0, "Id") - return query, fields # Return the original input fields with "Id" + return query, fields def similarity_post_process( @@ -226,9 +226,9 @@ def similarity_post_process( closest_records = [] if complexity_constant < 1000: - closest_records = annoy_post_process(load_records, query_records) - else: closest_records = levenshtein_post_process(load_records, query_records) + else: + closest_records = annoy_post_process(load_records, query_records) return closest_records From 7fae06e54f76d0508d13c8fb4ffbcf81f8cb0c26 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Fri, 8 Nov 2024 03:57:08 +0530 Subject: [PATCH 25/65] Functionality to prioritize user specified fields --- cumulusci/tasks/bulkdata/load.py | 9 +- cumulusci/tasks/bulkdata/mapping_parser.py | 44 +++--- .../tasks/bulkdata/query_transformers.py | 6 +- cumulusci/tasks/bulkdata/select_utils.py | 134 +++++++++++++----- cumulusci/tasks/bulkdata/step.py | 33 +++++ cumulusci/tasks/bulkdata/utils.py | 14 ++ 6 files changed, 180 insertions(+), 60 deletions(-) diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index d4050c0aca..ced885744b 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -360,7 +360,7 @@ def configure_step(self, mapping): num_records_in_target = sobject_map.get(mapping.sf_object, None) # Check for similarity selection strategy and modify fields accordingly - if mapping.selection_strategy == "similarity": + if mapping.select_options.strategy == "similarity": # Describe the object to determine 
polymorphic lookups describe_result = self.sf.restful( f"sobjects/{mapping.sf_object}/describe" @@ -469,8 +469,9 @@ def configure_step(self, mapping): fields=fields, api=mapping.api, volume=volume, - selection_strategy=mapping.selection_strategy, - selection_filter=mapping.selection_filter, + selection_strategy=mapping.select_options.strategy, + selection_filter=mapping.select_options.filter, + selection_priority_fields=mapping.select_options.priority_fields, content_type=content_type, ) return step, query @@ -577,7 +578,7 @@ def _query_db(self, mapping): transformers = [] if ( mapping.action == DataOperationType.SELECT - and mapping.selection_strategy == "similarity" + and mapping.select_options.strategy == "similarity" ): transformers.append( DynamicLookupQueryExtender( diff --git a/cumulusci/tasks/bulkdata/mapping_parser.py b/cumulusci/tasks/bulkdata/mapping_parser.py index c9009f82fc..6bad4f7bdd 100644 --- a/cumulusci/tasks/bulkdata/mapping_parser.py +++ b/cumulusci/tasks/bulkdata/mapping_parser.py @@ -8,34 +8,21 @@ from typing import IO, Any, Callable, Dict, List, Mapping, Optional, Tuple, Union from pydantic import Field, ValidationError, root_validator, validator -from requests.structures import CaseInsensitiveDict as RequestsCaseInsensitiveDict from simple_salesforce import Salesforce from typing_extensions import Literal from cumulusci.core.enums import StrEnum from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata.dates import iso_to_date -from cumulusci.tasks.bulkdata.select_utils import SelectStrategy +from cumulusci.tasks.bulkdata.select_utils import SelectOptions, SelectStrategy from cumulusci.tasks.bulkdata.step import DataApi, DataOperationType +from cumulusci.tasks.bulkdata.utils import CaseInsensitiveDict from cumulusci.utils import convert_to_snake_case from cumulusci.utils.yaml.model_parser import CCIDictModel logger = getLogger(__name__) -class CaseInsensitiveDict(RequestsCaseInsensitiveDict): - def __init__(self, *args, **kwargs): - self._canonical_keys = {} - super().__init__(*args, **kwargs) - - def canonical_key(self, name): - return self._canonical_keys[name.lower()] - - def __setitem__(self, key, value): - super().__setitem__(key, value) - self._canonical_keys[key.lower()] = key - - class MappingLookup(CCIDictModel): "Lookup relationship between two tables." table: Union[str, List[str]] # Support for polymorphic lookups @@ -85,7 +72,7 @@ class BulkMode(StrEnum): ENUM_VALUES = { v.value.lower(): v.value - for enum in [BulkMode, DataApi, DataOperationType, SelectStrategy] + for enum in [BulkMode, DataApi, DataOperationType] for v in enum.__members__.values() } @@ -108,13 +95,12 @@ class MappingStep(CCIDictModel): ) anchor_date: Optional[Union[str, date]] = None soql_filter: Optional[str] = None # soql_filter property - selection_strategy: SelectStrategy = SelectStrategy.STANDARD # selection strategy - selection_filter: Optional[str] = ( - None # filter to be added at the end of select query + select_options: Optional[SelectOptions] = Field( + default_factory=lambda: SelectOptions(strategy=SelectStrategy.STANDARD) ) update_key: T.Union[str, T.Tuple[str, ...]] = () # only for upserts - @validator("bulk_mode", "api", "action", "selection_strategy", pre=True) + @validator("bulk_mode", "api", "action", pre=True) def case_normalize(cls, val): if isinstance(val, Enum): return val @@ -134,6 +120,24 @@ def split_update_key(cls, val): ), "`update_key` should be a field name or list of field names." 
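Editor's note: `CaseInsensitiveDict` moves out of `mapping_parser` and into `bulkdata.utils` (shown further down in this patch) so `select_utils` can import it without a cycle. Its behavior in brief, using only what the class defines:

```python
from cumulusci.tasks.bulkdata.utils import CaseInsensitiveDict

# Case-insensitive lookups from requests' dict, plus a canonical_key() that
# recalls each key's original spelling.
d = CaseInsensitiveDict()
d["AccountId"] = "001xx0000001"
assert d["accountid"] == "001xx0000001"
assert d.canonical_key("ACCOUNTID") == "AccountId"
```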
assert False, "Should be unreachable" # pragma: no cover + @root_validator + def validate_priority_fields(cls, values): + select_options = values.get("select_options") + fields_ = values.get("fields_", {}) + + if select_options and select_options.priority_fields: + priority_field_names = set(select_options.priority_fields.keys()) + field_names = set(fields_.keys()) + + # Check if all priority fields are present in the fields + missing_fields = priority_field_names - field_names + if missing_fields: + raise ValueError( + f"Priority fields {missing_fields} are not present in 'fields'" + ) + + return values + def get_oid_as_pk(self): """Returns True if using Salesforce Ids as primary keys.""" return "Id" in self.fields diff --git a/cumulusci/tasks/bulkdata/query_transformers.py b/cumulusci/tasks/bulkdata/query_transformers.py index eda7a2cabe..b4daa4bd93 100644 --- a/cumulusci/tasks/bulkdata/query_transformers.py +++ b/cumulusci/tasks/bulkdata/query_transformers.py @@ -123,7 +123,11 @@ def columns_to_add(self): load_fields = lookup_mapping_step.get_load_field_list() for field in load_fields: matching_column = next( - (col for col in aliased_table.columns if col.name == field) + ( + col + for col in aliased_table.columns + if col.name == lookup_mapping_step.fields[field] + ) ) columns.append( matching_column.label(f"{aliased_table.name}_{field}") diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index 0e842a09e1..6de6adf652 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -1,10 +1,12 @@ import random import re import typing as T +from enum import Enum import numpy as np import pandas as pd from annoy import AnnoyIndex +from pydantic import Field, validator from sklearn.feature_extraction.text import HashingVectorizer from sklearn.preprocessing import StandardScaler @@ -12,6 +14,8 @@ from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( DEFAULT_DECLARATIONS, ) +from cumulusci.tasks.bulkdata.utils import CaseInsensitiveDict +from cumulusci.utils.yaml.model_parser import CCIDictModel class SelectStrategy(StrEnum): @@ -30,6 +34,35 @@ class SelectRecordRetrievalMode(StrEnum): MATCH = "match" +ENUM_VALUES = { + v.value.lower(): v.value + for enum in [SelectStrategy] + for v in enum.__members__.values() +} + + +class SelectOptions(CCIDictModel): + filter: T.Optional[str] = None # Optional filter for selection + strategy: SelectStrategy = SelectStrategy.STANDARD # Strategy for selection + priority_fields: T.Dict[str, str] = Field({}) + + @validator("strategy", pre=True) + def validate_strategy(cls, value): + if isinstance(value, Enum): + return value + if value is not None: + return ENUM_VALUES.get(value.lower()) + raise ValueError(f"Invalid strategy value: {value}") + + @validator("priority_fields", pre=True) + def standardize_fields_to_dict(cls, values): + if values is None: + values = {} + if type(values) is list: + values = {elem: elem for elem in values} + return CaseInsensitiveDict(values) + + class SelectOperationExecutor: def __init__(self, strategy: SelectStrategy): self.strategy = strategy @@ -68,7 +101,12 @@ def select_generate_query( ) def select_post_process( - self, load_records, query_records: list, num_records: int, sobject: str + self, + load_records, + query_records: list, + num_records: int, + sobject: str, + weights: list, ): # For STANDARD strategy if self.strategy == SelectStrategy.STANDARD: @@ -78,7 +116,10 @@ def select_post_process( # For 
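Editor's note: with the validators above, `SelectOptions` accepts loosely typed YAML input: strategy strings are matched case-insensitively against the enum, and a bare list of priority fields is coerced into a `{field: field}` mapping. A small usage sketch, assuming the usual pydantic coercion of the model's declared types:

```python
from cumulusci.tasks.bulkdata.select_utils import SelectOptions, SelectStrategy

opts = SelectOptions(
    strategy="SIMILARITY",                      # normalized by the validator
    filter="WHERE Name LIKE 'Test%'",
    priority_fields=["Name", "AccountNumber"],  # list -> {field: field}
)
assert opts.strategy == SelectStrategy.SIMILARITY
assert dict(opts.priority_fields) == {"Name": "Name", "AccountNumber": "AccountNumber"}
```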
SIMILARITY strategy elif self.strategy == SelectStrategy.SIMILARITY: return similarity_post_process( - load_records=load_records, query_records=query_records, sobject=sobject + load_records=load_records, + query_records=query_records, + sobject=sobject, + weights=weights, ) # For RANDOM strategy elif self.strategy == SelectStrategy.RANDOM: @@ -210,7 +251,7 @@ def similarity_generate_query( def similarity_post_process( - load_records, query_records: list, sobject: str + load_records, query_records: list, sobject: str, weights: list ) -> T.Tuple[T.List[dict], T.Union[str, None]]: """Processes the query results for the similarity selection strategy""" # Handle case where query returns 0 records @@ -226,15 +267,15 @@ def similarity_post_process( closest_records = [] if complexity_constant < 1000: - closest_records = levenshtein_post_process(load_records, query_records) + closest_records = levenshtein_post_process(load_records, query_records, weights) else: - closest_records = annoy_post_process(load_records, query_records) + closest_records = annoy_post_process(load_records, query_records, weights) return closest_records def annoy_post_process( - load_records: list, query_records: list + load_records: list, query_records: list, weights: list ) -> T.Tuple[T.List[dict], T.Union[str, None]]: """Processes the query results for the similarity selection strategy using Annoy algorithm for large number of records""" @@ -253,7 +294,7 @@ def annoy_post_process( } final_load_vectors, final_query_vectors = vectorize_records( - load_records, query_record_data, hash_features=hash_features + load_records, query_record_data, hash_features=hash_features, weights=weights ) # Create Annoy index for nearest neighbor search @@ -290,13 +331,13 @@ def annoy_post_process( def levenshtein_post_process( - load_records: list, query_records: list + load_records: list, query_records: list, weights: list ) -> T.Tuple[T.List[dict], T.Union[str, None]]: """Processes the query results for the similarity selection strategy using Levenshtein algorithm for small number of records""" closest_records = [] for record in load_records: - closest_record = find_closest_record(record, query_records) + closest_record = find_closest_record(record, query_records, weights) closest_records.append( {"id": closest_record[0], "success": True, "created": False} ) @@ -324,12 +365,12 @@ def random_post_process( return selected_records, None -def find_closest_record(load_record: list, query_records: list): +def find_closest_record(load_record: list, query_records: list, weights: list): closest_distance = float("inf") closest_record = query_records[0] for record in query_records: - distance = calculate_levenshtein_distance(load_record, record[1:]) + distance = calculate_levenshtein_distance(load_record, record[1:], weights) if distance < closest_distance: closest_distance = distance closest_record = record @@ -361,15 +402,16 @@ def levenshtein_distance(str1: str, str2: str): return dp[-1][-1] -def calculate_levenshtein_distance(record1: list, record2: list): +def calculate_levenshtein_distance(record1: list, record2: list, weights: list): if len(record1) != len(record2): raise ValueError("Records must have the same number of fields.") + elif len(record1) != len(weights): + raise ValueError("Records must be same size as fields (weights).") total_distance = 0 total_fields = 0 - for field1, field2 in zip(record1, record2): - + for field1, field2, weight in zip(record1, record2, weights): field1 = field1.lower() field2 = field2.lower() @@ -382,7 +424,8 
@@ def calculate_levenshtein_distance(record1: list, record2: list): # If one field is blank, reduce the impact of the distance distance = distance * 0.05 # Fixed value for blank vs non-blank - total_distance += distance + # Multiply the distance by the corresponding weight + total_distance += distance * weight total_fields += 1 return total_distance / total_fields if total_fields > 0 else 0 @@ -428,38 +471,57 @@ def add_limit_offset_to_user_filter( return f" {filter_clause}" -def determine_field_types(df): +def determine_field_types(df, weights): numerical_features = [] boolean_features = [] categorical_features = [] - for col in df.columns: + numerical_weights = [] + boolean_weights = [] + categorical_weights = [] + + for col, weight in zip(df.columns, weights): # Check if the column can be converted to numeric try: # Attempt to convert to numeric df[col] = pd.to_numeric(df[col], errors="raise") numerical_features.append(col) + numerical_weights.append(weight) except ValueError: # Check for boolean values if df[col].str.lower().isin(["true", "false"]).all(): # Map to actual boolean values df[col] = df[col].str.lower().map({"true": True, "false": False}) boolean_features.append(col) + boolean_weights.append(weight) else: categorical_features.append(col) - - return numerical_features, boolean_features, categorical_features + categorical_weights.append(weight) + + return ( + numerical_features, + boolean_features, + categorical_features, + numerical_weights, + boolean_weights, + categorical_weights, + ) -def vectorize_records(db_records, query_records, hash_features): +def vectorize_records(db_records, query_records, hash_features, weights): # Convert database records and query records to DataFrames df_db = pd.DataFrame(db_records) df_query = pd.DataFrame(query_records) - # Dynamically determine field types - numerical_features, boolean_features, categorical_features = determine_field_types( - df_db - ) + # Determine field types and corresponding weights + ( + numerical_features, + boolean_features, + categorical_features, + numerical_weights, + boolean_weights, + categorical_weights, + ) = determine_field_types(df_db, weights) # Fit StandardScaler on the numerical features of the database records scaler = StandardScaler() @@ -474,24 +536,26 @@ def vectorize_records(db_records, query_records, hash_features): # For db_records hashed_categorical_data_db = [] - for col in categorical_features: + for idx, col in enumerate(categorical_features): hashed_db = hashing_vectorizer.fit_transform(df_db[col]).toarray() - hashed_categorical_data_db.append(hashed_db) + # Apply weight to the hashed vector for this categorical feature + hashed_db_weighted = hashed_db * categorical_weights[idx] + hashed_categorical_data_db.append(hashed_db_weighted) # For query_records hashed_categorical_data_query = [] - for col in categorical_features: + for idx, col in enumerate(categorical_features): hashed_query = hashing_vectorizer.transform(df_query[col]).toarray() - hashed_categorical_data_query.append(hashed_query) + # Apply weight to the hashed vector for this categorical feature + hashed_query_weighted = hashed_query * categorical_weights[idx] + hashed_categorical_data_query.append(hashed_query_weighted) # Combine all feature types into a single vector for the database records db_vectors = [] if numerical_features: - db_vectors.append(df_db[numerical_features].values) + db_vectors.append(df_db[numerical_features].values * numerical_weights) if boolean_features: - db_vectors.append( - 
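Editor's note: the weighted distance above multiplies each field's Levenshtein distance by its weight before averaging (the 0.05 blank-field damping is omitted here); after PATCH 24's branch swap, this exact matcher serves the small `load x query` workloads while Annoy serves the large ones. A worked miniature:

```python
def lev(a, b):
    # Textbook dynamic-programming Levenshtein distance.
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, 1):
        cur = [i]
        for j, cb in enumerate(b, 1):
            cur.append(min(prev[j] + 1, cur[j - 1] + 1, prev[j - 1] + (ca != cb)))
        prev = cur
    return prev[-1]


def weighted_distance(rec1, rec2, weights):
    total = sum(lev(f1, f2) * w for f1, f2, w in zip(rec1, rec2, weights))
    return total / len(rec1)


# One edit in each field; the 2.0-weighted name field dominates the average:
# (1 * 2.0 + 1 * 0.5) / 2 = 1.25
assert weighted_distance(["tom cruise", "62"], ["tom cruse", "63"], [2.0, 0.5]) == 1.25
```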
df_db[boolean_features].astype(int).values - ) # Convert boolean to int + db_vectors.append(df_db[boolean_features].astype(int).values * boolean_weights) if hashed_categorical_data_db: db_vectors.append(np.hstack(hashed_categorical_data_db)) @@ -501,11 +565,11 @@ def vectorize_records(db_records, query_records, hash_features): # Combine all feature types into a single vector for the query records query_vectors = [] if numerical_features: - query_vectors.append(df_query[numerical_features].values) + query_vectors.append(df_query[numerical_features].values * numerical_weights) if boolean_features: query_vectors.append( - df_query[boolean_features].astype(int).values - ) # Convert boolean to int + df_query[boolean_features].astype(int).values * boolean_weights + ) if hashed_categorical_data_query: query_vectors.append(np.hstack(hashed_categorical_data_query)) diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index cb86bda6fa..ba0243c033 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -28,6 +28,8 @@ DEFAULT_BULK_BATCH_SIZE = 10_000 DEFAULT_REST_BATCH_SIZE = 200 MAX_REST_BATCH_SIZE = 200 +HIGH_PRIORITY_VALUE = 3 +LOW_PRIORITY_VALUE = 0.5 csv.field_size_limit(2**27) # 128 MB @@ -352,6 +354,7 @@ def __init__( fields, selection_strategy=SelectStrategy.STANDARD, selection_filter=None, + selection_priority_fields=None, content_type=None, ): super().__init__( @@ -370,6 +373,9 @@ def __init__( self.select_operation_executor = SelectOperationExecutor(selection_strategy) self.selection_filter = selection_filter + self.weights = assign_weights( + priority_fields=selection_priority_fields, fields=fields + ) self.content_type = content_type if content_type else "CSV" def start(self): @@ -494,6 +500,7 @@ def select_records(self, records): query_records=query_records, num_records=total_num_records, sobject=self.sobject, + weights=self.weights, ) if not error_message: self.select_results.extend(selected_records) @@ -651,6 +658,7 @@ def __init__( fields, selection_strategy=SelectStrategy.STANDARD, selection_filter=None, + selection_priority_fields=None, content_type=None, ): super().__init__( @@ -679,6 +687,9 @@ def __init__( self.select_operation_executor = SelectOperationExecutor(selection_strategy) self.selection_filter = selection_filter + self.weights = assign_weights( + priority_fields=selection_priority_fields, fields=fields + ) self.content_type = content_type def _record_to_json(self, rec): @@ -850,6 +861,7 @@ def select_records(self, records): query_records=query_records, num_records=total_num_records, sobject=self.sobject, + weights=self.weights, ) if not error_message: # Add selected records from this batch to the overall results @@ -1003,6 +1015,7 @@ def get_dml_operation( api: Optional[DataApi] = DataApi.SMART, selection_strategy: SelectStrategy = SelectStrategy.STANDARD, selection_filter: Union[str, None] = None, + selection_priority_fields: Union[dict, None] = None, content_type: Union[str, None] = None, ) -> BaseDmlOperation: """Create an appropriate DmlOperation instance for the given parameters, selecting @@ -1039,6 +1052,7 @@ def get_dml_operation( fields=fields, selection_strategy=selection_strategy, selection_filter=selection_filter, + selection_priority_fields=selection_priority_fields, content_type=content_type, ) @@ -1109,3 +1123,22 @@ def flatten_record(record, headers): flat_record.append(value) return flat_record + + +def assign_weights( + priority_fields: Union[Dict[str, str], None], fields: List[str] +) -> 
list: + # If priority_fields is None or an empty dictionary, set all weights to 1 + if not priority_fields: + return [1] * len(fields) + + # Initialize the weight list with LOW_PRIORITY_VALUE + weights = [LOW_PRIORITY_VALUE] * len(fields) + + # Iterate over the fields and assign weights based on priority_fields + for i, field in enumerate(fields): + if field in priority_fields: + # Set weight to HIGH_PRIORITY_VALUE if field is in priority_fields + weights[i] = HIGH_PRIORITY_VALUE + + return weights diff --git a/cumulusci/tasks/bulkdata/utils.py b/cumulusci/tasks/bulkdata/utils.py index b5c195a817..cee6a4ab66 100644 --- a/cumulusci/tasks/bulkdata/utils.py +++ b/cumulusci/tasks/bulkdata/utils.py @@ -5,6 +5,7 @@ from contextlib import contextmanager, nullcontext from pathlib import Path +from requests.structures import CaseInsensitiveDict as RequestsCaseInsensitiveDict from simple_salesforce import Salesforce from sqlalchemy import Boolean, Column, MetaData, Table, Unicode, inspect from sqlalchemy.engine.base import Connection @@ -23,6 +24,19 @@ class DataApi(StrEnum): SMART = "smart" +class CaseInsensitiveDict(RequestsCaseInsensitiveDict): + def __init__(self, *args, **kwargs): + self._canonical_keys = {} + super().__init__(*args, **kwargs) + + def canonical_key(self, name): + return self._canonical_keys[name.lower()] + + def __setitem__(self, key, value): + super().__setitem__(key, value) + self._canonical_keys[key.lower()] = key + + class SqlAlchemyMixin: logger: logging.Logger metadata: MetaData From 0ac200032597aa06079f261b3122b0cf2bef46a8 Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Fri, 8 Nov 2024 19:01:54 +0530 Subject: [PATCH 26/65] Add tests for annoy_post_process --- .../tasks/bulkdata/tests/test_select_utils.py | 339 +++++++++++++----- 1 file changed, 244 insertions(+), 95 deletions(-) diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index fe037a0177..26768d4ea1 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -1,11 +1,16 @@ +import pandas as pd import pytest from cumulusci.tasks.bulkdata.select_utils import ( SelectOperationExecutor, SelectStrategy, + annoy_post_process, calculate_levenshtein_distance, + determine_field_types, find_closest_record, levenshtein_distance, + replace_empty_strings_with_missing, + vectorize_records, ) @@ -193,107 +198,56 @@ def test_levenshtein_distance(): ) # Longer strings with multiple differences -def test_calculate_levenshtein_distance(): - # Identical records - record1 = ["Tom Cruise", "24", "Actor"] - record2 = ["Tom Cruise", "24", "Actor"] - assert calculate_levenshtein_distance(record1, record2) == 0 # Distance should be 0 - - # Records with one different field - record1 = ["Tom Cruise", "24", "Actor"] - record2 = ["Tom Hanks", "24", "Actor"] - assert calculate_levenshtein_distance(record1, record2) > 0 # Non-zero distance - - # One record has an empty field - record1 = ["Tom Cruise", "24", "Actor"] - record2 = ["Tom Cruise", "", "Actor"] - assert ( - calculate_levenshtein_distance(record1, record2) > 0 - ) # Distance should reflect the empty field - - # Completely empty records - record1 = ["", "", ""] - record2 = ["", "", ""] - assert calculate_levenshtein_distance(record1, record2) == 0 # Distance should be 0 - - -def test_calculate_levenshtein_distance_error(): - # Identical records - record1 = ["Tom Cruise", "24", "Actor"] - record2 = [ - "Tom Cruise", - "24", - "Actor", - "SomethingElse", - ] 
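Editor's note: `assign_weights` keeps backward compatibility: with no priority fields every column weighs 1, while any priority mapping boosts the named fields to `HIGH_PRIORITY_VALUE` (3) and demotes the rest to `LOW_PRIORITY_VALUE` (0.5). In brief:

```python
from cumulusci.tasks.bulkdata.step import assign_weights

fields = ["Name", "Phone", "Email"]
assert assign_weights(None, fields) == [1, 1, 1]              # no priorities
assert assign_weights({"Name": "Name"}, fields) == [3, 0.5, 0.5]
```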
# Record Length does not match - with pytest.raises(ValueError) as e: - calculate_levenshtein_distance(record1, record2) - assert "Records must have the same number of fields" in str(e.value) - - -def test_find_closest_record(): - # Test case 1: Exact match - load_record = ["Tom Cruise", "62", "Actor"] - query_records = [ - [1, "Tom Hanks", "30", "Actor"], - [2, "Tom Cruise", "62", "Actor"], # Exact match - [3, "Jennifer Aniston", "30", "Actress"], - ] - assert find_closest_record(load_record, query_records) == [ - 2, - "Tom Cruise", - "62", - "Actor", - ] # Should return the exact match - - # Test case 2: Closest match with slight differences - load_record = ["Tom Cruise", "62", "Actor"] +def test_find_closest_record_different_weights(): + load_record = ["hello", "world"] query_records = [ - [1, "Tom Hanks", "62", "Actor"], - [2, "Tom Cruise", "63", "Actor"], # Slight difference - [3, "Jennifer Aniston", "30", "Actress"], + ["record1", "hello", "word"], # Levenshtein distance = 1 + ["record2", "hullo", "word"], # Levenshtein distance = 1 + ["record3", "hello", "word"], # Levenshtein distance = 1 ] - assert find_closest_record(load_record, query_records) == [ - 2, - "Tom Cruise", - "63", - "Actor", - ] # Should return the closest match - - # Test case 3: All records are significantly different - load_record = ["Tom Cruise", "62", "Actor"] + weights = [2.0, 0.5] + + # With different weights, the first field will have more impact + closest_record = find_closest_record(load_record, query_records, weights) + assert closest_record == [ + "record1", + "hello", + "word", + ], "The closest record should be 'record1'." + + +def test_find_closest_record_basic(): + load_record = ["hello", "world"] query_records = [ - [1, "Brad Pitt", "30", "Producer"], - [2, "Leonardo DiCaprio", "40", "Director"], - [3, "Jennifer Aniston", "30", "Actress"], + ["record1", "hello", "word"], # Levenshtein distance = 1 + ["record2", "hullo", "word"], # Levenshtein distance = 1 + ["record3", "hello", "word"], # Levenshtein distance = 1 ] - assert ( - find_closest_record(load_record, query_records) == query_records[0] - ) # Should return the first record as the closest (though none are close) + weights = [1.0, 1.0] + + closest_record = find_closest_record(load_record, query_records, weights) + assert closest_record == [ + "record1", + "hello", + "word", + ], "The closest record should be 'record1'." 
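Editor's note: these tests expect `record1` even though `record3` is equidistant because `find_closest_record` only replaces the current best on a strictly smaller distance, so the first record at the minimum wins. The tie-breaking reduced to its comparison:

```python
best, best_distance = None, float("inf")
for record_id, distance in [("record1", 1.0), ("record2", 2.5), ("record3", 1.0)]:
    if distance < best_distance:  # strict '<': ties keep the earlier record
        best, best_distance = record_id, distance
assert best == "record1"
```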
- # Test case 4: Closest match is the last in the list - load_record = ["Tom Cruise", "62", "Actor"] + +def test_find_closest_record_multiple_matches(): + load_record = ["cat", "dog"] query_records = [ - [1, "Johnny Depp", "50", "Actor"], - [2, "Brad Pitt", "30", "Producer"], - [3, "Tom Cruise", "62", "Actor"], # Exact match as the last record + ["record1", "bat", "dog"], # Levenshtein distance = 1 + ["record2", "cat", "dog"], # Levenshtein distance = 0 + ["record3", "dog", "cat"], # Levenshtein distance = 3 ] - assert find_closest_record(load_record, query_records) == [ - 3, - "Tom Cruise", - "62", - "Actor", - ] # Should return the last record - - # Test case 5: Single record in query_records - load_record = ["Tom Cruise", "62", "Actor"] - query_records = [[1, "Johnny Depp", "50", "Actor"]] - assert find_closest_record(load_record, query_records) == [ - 1, - "Johnny Depp", - "50", - "Actor", - ] # Should return the only record available + weights = [1.0, 1.0] + + closest_record = find_closest_record(load_record, query_records, weights) + assert closest_record == [ + "record2", + "cat", + "dog", + ], "The closest record should be 'record2'." def test_similarity_post_process_with_records(): @@ -307,10 +261,16 @@ def test_similarity_post_process_with_records(): ["003", "Jennifer Aniston", "30", "Actress"], ] + weights = [1.0, 1.0, 1.0] # Adjust weights to match your data structure + selected_records, error_message = select_operator.select_post_process( - load_records, query_records, num_records, sobject + load_records, query_records, num_records, sobject, weights ) + # selected_records, error_message = select_operator.select_post_process( + # load_records, query_records, num_records, sobject + # ) + assert error_message is None assert len(selected_records) == num_records assert all(record["success"] for record in selected_records) @@ -329,3 +289,192 @@ def test_similarity_post_process_with_no_records(): assert selected_records == [] assert error_message == f"No records found for {sobject} in the target org." + + +def test_calculate_levenshtein_distance_basic(): + record1 = ["hello", "world"] + record2 = ["hullo", "word"] + weights = [1.0, 1.0] + + # Expected distance based on simple Levenshtein distances + # Levenshtein("hello", "hullo") = 1, Levenshtein("world", "word") = 1 + expected_distance = (1 * 1.0 + 1 * 1.0) / 2 # Averaged over two fields + + result = calculate_levenshtein_distance(record1, record2, weights) + assert result == pytest.approx( + expected_distance + ), "Basic distance calculation failed." + + +def test_calculate_levenshtein_distance_weighted(): + record1 = ["cat", "dog"] + record2 = ["bat", "fog"] + weights = [2.0, 0.5] + + # Levenshtein("cat", "bat") = 1, Levenshtein("dog", "fog") = 1 + expected_distance = (1 * 2.0 + 1 * 0.5) / 2 # Weighted average over two fields + + result = calculate_levenshtein_distance(record1, record2, weights) + assert result == pytest.approx( + expected_distance + ), "Weighted distance calculation failed." 
+ + +def test_replace_empty_strings_with_missing(): + # Case 1: Normal case with some empty strings + records = [ + ["Alice", "", "New York"], + ["Bob", "Engineer", ""], + ["", "Teacher", "Chicago"], + ] + expected = [ + ["Alice", "missing", "New York"], + ["Bob", "Engineer", "missing"], + ["missing", "Teacher", "Chicago"], + ] + assert replace_empty_strings_with_missing(records) == expected + + # Case 2: No empty strings, so the output should be the same as input + records = [["Alice", "Manager", "New York"], ["Bob", "Engineer", "San Francisco"]] + expected = [["Alice", "Manager", "New York"], ["Bob", "Engineer", "San Francisco"]] + assert replace_empty_strings_with_missing(records) == expected + + # Case 3: List with all empty strings + records = [["", "", ""], ["", "", ""]] + expected = [["missing", "missing", "missing"], ["missing", "missing", "missing"]] + assert replace_empty_strings_with_missing(records) == expected + + # Case 4: Empty list (should return an empty list) + records = [] + expected = [] + assert replace_empty_strings_with_missing(records) == expected + + # Case 5: List with some empty sublists + records = [[], ["Alice", ""], []] + expected = [[], ["Alice", "missing"], []] + assert replace_empty_strings_with_missing(records) == expected + + +def test_all_numeric_columns(): + df = pd.DataFrame({"A": [1, 2, 3], "B": [4.5, 5.5, 6.5]}) + weights = [0.1, 0.2] + expected_output = ( + ["A", "B"], # numerical_features + [], # boolean_features + [], # categorical_features + [0.1, 0.2], # numerical_weights + [], # boolean_weights + [], # categorical_weights + ) + assert determine_field_types(df, weights) == expected_output + + +def test_all_boolean_columns(): + df = pd.DataFrame({"A": ["true", "false", "true"], "B": ["false", "true", "false"]}) + weights = [0.3, 0.4] + expected_output = ( + [], # numerical_features + ["A", "B"], # boolean_features + [], # categorical_features + [], # numerical_weights + [0.3, 0.4], # boolean_weights + [], # categorical_weights + ) + assert determine_field_types(df, weights) == expected_output + + +def test_all_categorical_columns(): + df = pd.DataFrame( + {"A": ["apple", "banana", "cherry"], "B": ["dog", "cat", "mouse"]} + ) + weights = [0.5, 0.6] + expected_output = ( + [], # numerical_features + [], # boolean_features + ["A", "B"], # categorical_features + [], # numerical_weights + [], # boolean_weights + [0.5, 0.6], # categorical_weights + ) + assert determine_field_types(df, weights) == expected_output + + +def test_mixed_types(): + df = pd.DataFrame( + { + "A": [1, 2, 3], + "B": ["true", "false", "true"], + "C": ["apple", "banana", "cherry"], + } + ) + weights = [0.7, 0.8, 0.9] + expected_output = ( + ["A"], # numerical_features + ["B"], # boolean_features + ["C"], # categorical_features + [0.7], # numerical_weights + [0.8], # boolean_weights + [0.9], # categorical_weights + ) + assert determine_field_types(df, weights) == expected_output + + +def test_vectorize_records_mixed_numerical_categorical(): + # Test data with mixed types: numerical and categorical only + db_records = [["1.0", "apple"], ["2.0", "banana"]] + query_records = [["1.5", "apple"], ["2.5", "cherry"]] + weights = [1.0, 1.0] # Equal weights for numerical and categorical columns + hash_features = 4 # Number of hashing vectorizer features for categorical columns + + final_db_vectors, final_query_vectors = vectorize_records( + db_records, query_records, hash_features, weights + ) + + # Check the shape of the output vectors + assert final_db_vectors.shape[0] == len(db_records), 
"DB vectors row count mismatch" + assert final_query_vectors.shape[0] == len( + query_records + ), "Query vectors row count mismatch" + + # Expected dimensions: numerical (1) + categorical hashed features (4) + expected_feature_count = 1 + hash_features + assert ( + final_db_vectors.shape[1] == expected_feature_count + ), "DB vectors column count mismatch" + assert ( + final_query_vectors.shape[1] == expected_feature_count + ), "Query vectors column count mismatch" + + +def test_annoy_post_process(): + # Test data + load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]] + query_records = [["q1", "Alice", "Engineer"], ["q2", "Charlie", "Artist"]] + weights = [1.0, 1.0, 1.0] # Example weights + + closest_records, error = annoy_post_process(load_records, query_records, weights) + + # Assert the closest records + assert ( + len(closest_records) == 2 + ) # We expect two results (one for each query record) + assert ( + closest_records[0]["id"] == "q1" + ) # The first query record should match the first load record + + # No errors expected + assert error is None + + +def test_single_record_match_annoy_post_process(): + # Mock data where only the first query record matches the first load record + load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]] + query_records = [["q1", "Alice", "Engineer"]] + weights = [1.0, 1.0, 1.0] + + closest_records, error = annoy_post_process(load_records, query_records, weights) + + # Both the load records should be matched with the only query record we have + assert len(closest_records) == 2 + assert closest_records[0]["id"] == "q1" + assert error is None From 730ba6ca3d38b04c8cf80f49700cf0883dc1dccb Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Mon, 11 Nov 2024 20:16:30 +0530 Subject: [PATCH 27/65] Add tests for parent level similarity and priority fields --- cumulusci/core/tests/test_datasets_e2e.py | 4 + ...generate_load_mapping_from_declarations.py | 17 + cumulusci/tasks/bulkdata/load.py | 167 +-- cumulusci/tasks/bulkdata/mapping_parser.py | 5 +- .../tasks/bulkdata/query_transformers.py | 32 +- cumulusci/tasks/bulkdata/select_utils.py | 20 +- cumulusci/tasks/bulkdata/step.py | 55 +- .../tasks/bulkdata/tests/mapping_select.yml | 20 + .../tests/mapping_select_invalid_strategy.yml | 20 + ...mapping_select_missing_priority_fields.yml | 22 + .../mapping_select_no_priority_fields.yml | 18 + cumulusci/tasks/bulkdata/tests/test_load.py | 114 ++ .../bulkdata/tests/test_mapping_parser.py | 36 + .../tests/test_query_db_joins_lookups.sql | 16 +- .../test_query_db_joins_lookups_select.yml | 48 + .../tasks/bulkdata/tests/test_select_utils.py | 209 +++- cumulusci/tasks/bulkdata/tests/test_step.py | 1091 ++++++++++++----- 17 files changed, 1442 insertions(+), 452 deletions(-) create mode 100644 cumulusci/tasks/bulkdata/tests/mapping_select.yml create mode 100644 cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml create mode 100644 cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml create mode 100644 cumulusci/tasks/bulkdata/tests/mapping_select_no_priority_fields.yml create mode 100644 cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml diff --git a/cumulusci/core/tests/test_datasets_e2e.py b/cumulusci/core/tests/test_datasets_e2e.py index c5140d3609..387ad696ad 100644 --- a/cumulusci/core/tests/test_datasets_e2e.py +++ b/cumulusci/core/tests/test_datasets_e2e.py @@ -304,6 +304,7 @@ def write_yaml(filename: str, json: Any): "after": "Insert Account", } }, + "select_options": {}, }, "Insert Event": { 
"sf_object": "Event", @@ -316,16 +317,19 @@ def write_yaml(filename: str, json: Any): "after": "Insert Lead", } }, + "select_options": {}, }, "Insert Account": { "sf_object": "Account", "table": "Account", "fields": ["Name"], + "select_options": {}, }, "Insert Lead": { "sf_object": "Lead", "table": "Lead", "fields": ["Company", "LastName"], + "select_options": {}, }, } assert tuple(actual.items()) == tuple(expected.items()), actual.items() diff --git a/cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_load_mapping_from_declarations.py b/cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_load_mapping_from_declarations.py index 7dbaefc740..69dd0e361d 100644 --- a/cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_load_mapping_from_declarations.py +++ b/cumulusci/tasks/bulkdata/generate_mapping_utils/tests/test_generate_load_mapping_from_declarations.py @@ -41,6 +41,7 @@ def test_simple_generate_mapping_from_declarations(self, org_config): "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, } } @@ -74,11 +75,13 @@ def test_generate_mapping_from_both_kinds_of_declarations(self, org_config): "sf_object": "Contact", "table": "Contact", "fields": ["FirstName", "LastName"], + "select_options": {}, }, "Insert Account": { "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, }, }.items() ) @@ -111,6 +114,7 @@ def test_generate_load_mapping_from_declarations__lookups(self, org_config): "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", @@ -119,6 +123,7 @@ def test_generate_load_mapping_from_declarations__lookups(self, org_config): "lookups": { "AccountId": {"table": ["Account"], "key_field": "AccountId"} }, + "select_options": {}, }, } @@ -157,6 +162,7 @@ def test_generate_load_mapping_from_declarations__polymorphic_lookups( "sf_object": "Account", "table": "Account", "fields": ["Name", "Description"], + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", @@ -165,11 +171,13 @@ def test_generate_load_mapping_from_declarations__polymorphic_lookups( "lookups": { "AccountId": {"table": ["Account"], "key_field": "AccountId"} }, + "select_options": {}, }, "Insert Lead": { "sf_object": "Lead", "table": "Lead", "fields": ["LastName", "Company"], + "select_options": {}, }, "Insert Event": { "sf_object": "Event", @@ -178,6 +186,7 @@ def test_generate_load_mapping_from_declarations__polymorphic_lookups( "lookups": { "WhoId": {"table": ["Contact", "Lead"], "key_field": "WhoId"} }, + "select_options": {}, }, } @@ -221,6 +230,7 @@ def test_generate_load_mapping_from_declarations__circular_lookups( }, "sf_object": "Account", "table": "Account", + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", @@ -229,6 +239,7 @@ def test_generate_load_mapping_from_declarations__circular_lookups( "lookups": { "AccountId": {"table": ["Account"], "key_field": "AccountId"} }, + "select_options": {}, }, }, mf @@ -252,11 +263,13 @@ def test_generate_load_mapping__with_load_declarations(self, org_config): "sf_object": "Account", "api": DataApi.REST, "table": "Account", + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", "api": DataApi.BULK, "table": "Contact", + "select_options": {}, }, }, mf @@ -288,6 +301,7 @@ def test_generate_load_mapping__with_upserts(self, org_config): "Insert Account": { "sf_object": "Account", "table": 
"Account", + "select_options": {}, }, "Upsert Account Name": { "sf_object": "Account", @@ -295,6 +309,7 @@ def test_generate_load_mapping__with_upserts(self, org_config): "action": DataOperationType.UPSERT, "update_key": ("Name",), "fields": ["Name"], + "select_options": {}, }, "Etl_Upsert Account AccountNumber_Name": { "sf_object": "Account", @@ -302,10 +317,12 @@ def test_generate_load_mapping__with_upserts(self, org_config): "action": DataOperationType.ETL_UPSERT, "update_key": ("AccountNumber", "Name"), "fields": ["AccountNumber", "Name"], + "select_options": {}, }, "Insert Contact": { "sf_object": "Contact", "table": "Contact", + "select_options": {}, }, }, mf diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index ced885744b..9a2f08ee90 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -310,6 +310,90 @@ def _execute_step( return step.job_result + def process_lookup_fields(self, mapping, fields, polymorphic_fields): + """Modify fields and priority fields based on lookup and polymorphic checks.""" + for name, lookup in mapping.lookups.items(): + if name in fields: + # Get the index of the lookup field before removing it + insert_index = fields.index(name) + # Remove the lookup field from fields + fields.remove(name) + + # Do the same for priority fields + lookup_in_priority_fields = False + if name in mapping.select_options.priority_fields: + # Set flag to True + lookup_in_priority_fields = True + # Remove the lookup field from priority fields + del mapping.select_options.priority_fields[name] + + # Check if this lookup field is polymorphic + if ( + name in polymorphic_fields + and len(polymorphic_fields[name]["referenceTo"]) > 1 + ): + # Convert to list if string + if not isinstance(lookup.table, list): + lookup.table = [lookup.table] + # Polymorphic field handling + polymorphic_references = lookup.table + relationship_name = polymorphic_fields[name]["relationshipName"] + + # Loop through each polymorphic type (e.g., Contact, Lead) + for ref_type in polymorphic_references: + # Find the mapping step for this polymorphic type + lookup_mapping_step = next( + ( + step + for step in self.mapping.values() + if step.table == ref_type + ), + None, + ) + if lookup_mapping_step: + lookup_fields = lookup_mapping_step.get_load_field_list() + # Insert fields in the format {relationship_name}.{ref_type}.{lookup_field} + for field in lookup_fields: + fields.insert( + insert_index, + f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}", + ) + insert_index += 1 + if lookup_in_priority_fields: + mapping.select_options.priority_fields[ + f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}" + ] = f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}" + + else: + # Non-polymorphic field handling + lookup_table = lookup.table + + if isinstance(lookup_table, list): + lookup_table = lookup_table[0] + + # Get the mapping step for the non-polymorphic reference + lookup_mapping_step = next( + ( + step + for step in self.mapping.values() + if step.table == lookup_table + ), + None, + ) + + if lookup_mapping_step: + relationship_name = polymorphic_fields[name]["relationshipName"] + lookup_fields = lookup_mapping_step.get_load_field_list() + + # Insert the new fields at the same position as the removed lookup field + for field in lookup_fields: + fields.insert(insert_index, f"{relationship_name}.{field}") + insert_index += 1 + if lookup_in_priority_fields: + mapping.select_options.priority_fields[ + 
f"{relationship_name}.{field}" + ] = f"{relationship_name}.{field}" + def configure_step(self, mapping): """Create a step appropriate to the action""" bulk_mode = mapping.bulk_mode or self.bulk_mode or "Parallel" @@ -370,85 +454,7 @@ def configure_step(self, mapping): for field in describe_result["fields"] if field["type"] == "reference" } - - # Loop through each lookup to get the corresponding fields - for name, lookup in mapping.lookups.items(): - if name in fields: - # Get the index of the lookup field before removing it - insert_index = fields.index(name) - # Remove the lookup field from fields - fields.remove(name) - - # Check if this lookup field is polymorphic - if ( - name in polymorphic_fields - and len(polymorphic_fields[name]["referenceTo"]) > 1 - ): - # Convert to list if string - if not isinstance(lookup.table, list): - lookup.table = [lookup.table] - # Polymorphic field handling - polymorphic_references = lookup.table - relationship_name = polymorphic_fields[name][ - "relationshipName" - ] - - # Loop through each polymorphic type (e.g., Contact, Lead) - for ref_type in polymorphic_references: - # Find the mapping step for this polymorphic type - lookup_mapping_step = next( - ( - step - for step in self.mapping.values() - if step.sf_object == ref_type - ), - None, - ) - - if lookup_mapping_step: - lookup_fields = ( - lookup_mapping_step.get_load_field_list() - ) - # Insert fields in the format {relationship_name}.{ref_type}.{lookup_field} - for field in lookup_fields: - fields.insert( - insert_index, - f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}", - ) - insert_index += 1 - - else: - # Non-polymorphic field handling - lookup_table = lookup.table - - if isinstance(lookup_table, list): - lookup_table = lookup_table[0] - - # Get the mapping step for the non-polymorphic reference - lookup_mapping_step = next( - ( - step - for step in self.mapping.values() - if step.sf_object == lookup_table - ), - None, - ) - - if lookup_mapping_step: - relationship_name = polymorphic_fields[name][ - "relationshipName" - ] - lookup_fields = ( - lookup_mapping_step.get_load_field_list() - ) - - # Insert the new fields at the same position as the removed lookup field - for field in lookup_fields: - fields.insert( - insert_index, f"{relationship_name}.{field}" - ) - insert_index += 1 - + self.process_lookup_fields(mapping, fields, polymorphic_fields) else: action = mapping.action @@ -503,9 +509,6 @@ def _stream_queried_data(self, mapping, local_ids, query): pkey = row[0] row = list(row[1:]) + statics - # Replace None values in row with empty strings - row = [value if value is not None else "" for value in row] - if mapping.anchor_date and (date_context[0] or date_context[1]): row = adjust_relative_dates( mapping, date_context, row, DataOperationType.INSERT diff --git a/cumulusci/tasks/bulkdata/mapping_parser.py b/cumulusci/tasks/bulkdata/mapping_parser.py index 6bad4f7bdd..1593dc97a1 100644 --- a/cumulusci/tasks/bulkdata/mapping_parser.py +++ b/cumulusci/tasks/bulkdata/mapping_parser.py @@ -124,16 +124,19 @@ def split_update_key(cls, val): def validate_priority_fields(cls, values): select_options = values.get("select_options") fields_ = values.get("fields_", {}) + lookups = values.get("lookups", {}) if select_options and select_options.priority_fields: priority_field_names = set(select_options.priority_fields.keys()) field_names = set(fields_.keys()) + lookup_names = set(lookups.keys()) # Check if all priority fields are present in the fields missing_fields = priority_field_names - 
field_names + missing_fields = missing_fields - lookup_names if missing_fields: raise ValueError( - f"Priority fields {missing_fields} are not present in 'fields'" + f"Priority fields {missing_fields} are not present in 'fields' or 'lookups'" ) return values diff --git a/cumulusci/tasks/bulkdata/query_transformers.py b/cumulusci/tasks/bulkdata/query_transformers.py index b4daa4bd93..f99689618e 100644 --- a/cumulusci/tasks/bulkdata/query_transformers.py +++ b/cumulusci/tasks/bulkdata/query_transformers.py @@ -3,6 +3,7 @@ from sqlalchemy import String, and_, func, text from sqlalchemy.orm import Query, aliased +from sqlalchemy.sql import literal_column from cumulusci.core.exceptions import BulkDataException @@ -106,7 +107,10 @@ def columns_to_add(self): for lookup in self.lookups: tables = lookup.table if isinstance(lookup.table, list) else [lookup.table] lookup.aliased_table = [ - aliased(self.metadata.tables[table]) for table in tables + aliased( + self.metadata.tables[table], name=f"{lookup.name}_{table}_alias" + ) + for table in tables ] for aliased_table, table_name in zip(lookup.aliased_table, tables): @@ -122,16 +126,24 @@ def columns_to_add(self): if lookup_mapping_step: load_fields = lookup_mapping_step.get_load_field_list() for field in load_fields: - matching_column = next( - ( - col - for col in aliased_table.columns - if col.name == lookup_mapping_step.fields[field] + if field in lookup_mapping_step.fields: + matching_column = next( + ( + col + for col in aliased_table.columns + if col.name == lookup_mapping_step.fields[field] + ) + ) + columns.append( + matching_column.label(f"{aliased_table.name}_{field}") + ) + else: + # Append an empty string if the field is not present + columns.append( + literal_column("''").label( + f"{aliased_table.name}_{field}" + ) ) - ) - columns.append( - matching_column.label(f"{aliased_table.name}_{field}") - ) return columns @cached_property diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index 6de6adf652..f5800f9b38 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -50,8 +50,12 @@ class SelectOptions(CCIDictModel): def validate_strategy(cls, value): if isinstance(value, Enum): return value - if value is not None: - return ENUM_VALUES.get(value.lower()) + + if value: + matched_strategy = ENUM_VALUES.get(value.lower()) + if matched_strategy: + return matched_strategy + raise ValueError(f"Invalid strategy value: {value}") @validator("priority_fields", pre=True) @@ -260,6 +264,10 @@ def similarity_post_process( return [], error_message load_records = list(load_records) + # Replace None values in each row with empty strings + for idx, row in enumerate(load_records): + row = [value if value is not None else "" for value in row] + load_records[idx] = row load_record_count, query_record_count = len(load_records), len(query_records) complexity_constant = load_record_count * query_record_count @@ -514,6 +522,7 @@ def vectorize_records(db_records, query_records, hash_features, weights): df_query = pd.DataFrame(query_records) # Determine field types and corresponding weights + # Modifies boolean columns to True or False ( numerical_features, boolean_features, @@ -523,6 +532,13 @@ def vectorize_records(db_records, query_records, hash_features, weights): categorical_weights, ) = determine_field_types(df_db, weights) + # Modify query dataframe boolean columns to True or False + for col in df_query.columns: + if df_query[col].str.lower().isin(["true", 
"false"]).all(): + df_query[col] = ( + df_query[col].str.lower().map({"true": True, "false": False}) + ) + # Fit StandardScaler on the numerical features of the database records scaler = StandardScaler() if numerical_features: diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index ba0243c033..3e60ef91c0 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -14,7 +14,7 @@ import salesforce_bulk from cumulusci.core.enums import StrEnum -from cumulusci.core.exceptions import BulkDataException, SOQLQueryException +from cumulusci.core.exceptions import BulkDataException from cumulusci.core.utils import process_bool_arg from cumulusci.tasks.bulkdata.select_utils import ( SelectOperationExecutor, @@ -879,59 +879,6 @@ def select_records(self, records): total_row_errors=0, ) - def _execute_composite_query(self, select_query, user_query, query_fields): - """Executes a composite request with two queries and returns the results.""" - - def convert(rec, fields): - """Helper function to convert record values to strings, handling None values""" - return [str(rec[f]) if rec[f] is not None else "" for f in fields] - - composite_request_json = { - "compositeRequest": [ - { - "method": "GET", - "url": requests.utils.requote_uri( - f"/services/data/v{self.sf.sf_version}/query/?q={select_query}" - ), - "referenceId": "select_query", - }, - { - "method": "GET", - "url": requests.utils.requote_uri( - f"/services/data/v{self.sf.sf_version}/query/?q={user_query}" - ), - "referenceId": "user_query", - }, - ] - } - response = self.sf.restful( - "composite", method="POST", json=composite_request_json - ) - - # Extract results based on referenceId - for sub_response in response["compositeResponse"]: - if ( - sub_response["referenceId"] == "select_query" - and sub_response["httpStatusCode"] == 200 - ): - select_query_records = list( - convert(rec, query_fields) - for rec in sub_response["body"]["records"] - ) - elif ( - sub_response["referenceId"] == "user_query" - and sub_response["httpStatusCode"] == 200 - ): - user_query_records = list( - convert(rec, ["Id"]) for rec in sub_response["body"]["records"] - ) - else: - raise SOQLQueryException( - f"{sub_response['body'][0]['errorCode']}: {sub_response['body'][0]['message']}" - ) - - return user_query_records, select_query_records - def get_results(self): """Return a generator of DataOperationResult objects.""" diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select.yml b/cumulusci/tasks/bulkdata/tests/mapping_select.yml new file mode 100644 index 0000000000..e549d7a474 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select.yml @@ -0,0 +1,20 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml new file mode 100644 index 0000000000..6ab196fda6 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_strategy.yml @@ -0,0 +1,20 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: 
invalid_strategy + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml new file mode 100644 index 0000000000..34011945ad --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_missing_priority_fields.yml @@ -0,0 +1,22 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN Name in ('Sample Account') + priority_fields: + - Name + - AccountNumber + - ParentId + - Email + fields: + - Name + - AccountNumber + - Description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_no_priority_fields.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_no_priority_fields.yml new file mode 100644 index 0000000000..1559848b48 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_no_priority_fields.yml @@ -0,0 +1,18 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN Name in ('Sample Account') + priority_fields: + fields: + - Name + - AccountNumber + - Description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/test_load.py b/cumulusci/tasks/bulkdata/tests/test_load.py index 6649ff202e..9fb6ea1d87 100644 --- a/cumulusci/tasks/bulkdata/tests/test_load.py +++ b/cumulusci/tasks/bulkdata/tests/test_load.py @@ -806,6 +806,111 @@ def test_stream_queried_data__skips_empty_rows(self): ["001000000006", "001000000008"], ] == records + def test_process_lookup_fields_polymorphic(self): + task = _make_task( + LoadData, + { + "options": { + "sql_path": Path(__file__).parent + / "test_query_db_joins_lookups.sql", + "mapping": Path(__file__).parent + / "test_query_db_joins_lookups_select.yml", + } + }, + ) + polymorphic_fields = { + "WhoId": { + "name": "WhoId", + "referenceTo": ["Contact", "Lead"], + "relationshipName": "Who", + }, + "WhatId": { + "name": "WhatId", + "referenceTo": ["Account"], + "relationshipName": "What", + }, + } + + expected_fields = [ + "Subject", + "Who.Contact.FirstName", + "Who.Contact.LastName", + "Who.Contact.AccountId", + "Who.Lead.LastName", + ] + expected_priority_fields_keys = { + "Who.Contact.FirstName", + "Who.Contact.LastName", + "Who.Contact.AccountId", + "Who.Lead.LastName", + } + with mock.patch( + "cumulusci.tasks.bulkdata.load.validate_and_inject_mapping" + ), mock.patch.object(task, "sf", create=True): + task._init_mapping() + with task._init_db(): + task._old_format = mock.Mock(return_value=False) + mapping = task.mapping["Select Event"] + fields = mapping.get_load_field_list() + task.process_lookup_fields( + mapping=mapping, fields=fields, polymorphic_fields=polymorphic_fields + ) + assert fields == expected_fields + assert ( + set(mapping.select_options.priority_fields.keys()) + == expected_priority_fields_keys + ) + + def test_process_lookup_fields_non_polymorphic(self): + task = _make_task( + LoadData, + { + "options": { + "sql_path": Path(__file__).parent + / "test_query_db_joins_lookups.sql", + "mapping": Path(__file__).parent 
+ / "test_query_db_joins_lookups_select.yml", + } + }, + ) + non_polymorphic_fields = { + "AccountId": { + "name": "AccountId", + "referenceTo": ["Account"], + "relationshipName": "Account", + } + } + + expected_fields = [ + "FirstName", + "LastName", + "Account.Name", + "Account.AccountNumber", + ] + expected_priority_fields_keys = { + "FirstName", + "Account.Name", + "Account.AccountNumber", + } + with mock.patch( + "cumulusci.tasks.bulkdata.load.validate_and_inject_mapping" + ), mock.patch.object(task, "sf", create=True): + task._init_mapping() + with task._init_db(): + task._old_format = mock.Mock(return_value=False) + mapping = task.mapping["Select Contact"] + fields = mapping.get_load_field_list() + task.process_lookup_fields( + mapping=mapping, + fields=fields, + polymorphic_fields=non_polymorphic_fields, + ) + assert fields == expected_fields + assert ( + set(mapping.select_options.priority_fields.keys()) + == expected_priority_fields_keys + ) + @responses.activate def test_stream_queried_data__adjusts_relative_dates(self): mock_describe_calls() @@ -878,6 +983,15 @@ def test_query_db__joins_self_lookups(self): old_format=True, ) + def test_query_db__joins_select_lookups(self): + """SQL File in New Format (Select)""" + _validate_query_for_mapping_step( + sql_path=Path(__file__).parent / "test_query_db_joins_lookups.sql", + mapping=Path(__file__).parent / "test_query_db_joins_lookups_select.yml", + mapping_step_name="Select Event", + expected='''SELECT events.id AS events_id, events."subject" AS "events_subject", "whoid_contacts_alias"."firstname" AS "whoid_contacts_alias_firstname", "whoid_contacts_alias"."lastname" AS "whoid_contacts_alias_lastname", '' AS "whoid_contacts_alias_accountid", "whoid_leads_alias"."lastname" AS "whoid_leads_alias_lastname" from events LEFT OUTER JOIN contacts AS "whoid_contacts_alias" ON "whoid_contacts_alias".id=events."whoid" LEFT OUTER JOIN leads AS "whoid_leads_alias" ON "whoid_leads_alias".id=events."whoid" ORDER BY events."whoid"''', + ) + def test_query_db__joins_polymorphic_lookups(self): """SQL File in New Format (Polymorphic)""" _validate_query_for_mapping_step( diff --git a/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py b/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py index c1419f300b..ae9fe91686 100644 --- a/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py +++ b/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py @@ -17,6 +17,7 @@ parse_from_yaml, validate_and_inject_mapping, ) +from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import DataApi, DataOperationType from cumulusci.tests.util import DummyOrgConfig, mock_describe_calls @@ -213,6 +214,41 @@ def test_get_relative_date_e2e(self): date.today(), ) + def test_select_options__success(self): + base_path = Path(__file__).parent / "mapping_select.yml" + result = parse_from_yaml(base_path) + + step = result["Select Accounts"] + select_options = step.select_options + assert select_options + assert select_options.strategy == SelectStrategy.SIMILARITY + assert select_options.filter == "WHEN Name in ('Sample Account')" + assert select_options.priority_fields + + def test_select_options__invalid_strategy(self): + base_path = Path(__file__).parent / "mapping_select_invalid_strategy.yml" + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + assert "Invalid strategy value: invalid_strategy" in str(e.value) + + def test_select_options__missing_priority_fields(self): + base_path = Path(__file__).parent / 
"mapping_select_missing_priority_fields.yml" + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + print(str(e.value)) + assert ( + "Priority fields {'Email'} are not present in 'fields' or 'lookups'" + in str(e.value) + ) + + def test_select_options__no_priority_fields(self): + base_path = Path(__file__).parent / "mapping_select_no_priority_fields.yml" + result = parse_from_yaml(base_path) + + step = result["Select Accounts"] + select_options = step.select_options + assert select_options.priority_fields == {} + # Start of FLS/Namespace Injection Unit Tests def test_is_injectable(self): diff --git a/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql index 113e5cebe5..ed7f0e694a 100644 --- a/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql +++ b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups.sql @@ -1,13 +1,23 @@ BEGIN TRANSACTION; +CREATE TABLE "accounts" ( + id VARCHAR(255) NOT NULL, + "Name" VARCHAR(255), + "AccountNumber" VARCHAR(255), + PRIMARY KEY (id) +); +INSERT INTO "accounts" VALUES("Account-1",'Bluth Company','123456'); +INSERT INTO "accounts" VALUES("Account-2",'Sampson PLC','567890'); + CREATE TABLE "contacts" ( id VARCHAR(255) NOT NULL, "FirstName" VARCHAR(255), - "LastName" VARCHAR(255), + "LastName" VARCHAR(255), + "AccountId" VARCHAR(255), PRIMARY KEY (id) ); -INSERT INTO "contacts" VALUES("Contact-1",'Alpha','gamma'); -INSERT INTO "contacts" VALUES("Contact-2",'Temp','Bluth'); +INSERT INTO "contacts" VALUES("Contact-1",'Alpha','gamma', 'Account-2'); +INSERT INTO "contacts" VALUES("Contact-2",'Temp','Bluth', 'Account-1'); CREATE TABLE "events" ( id VARCHAR(255) NOT NULL, diff --git a/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml new file mode 100644 index 0000000000..4b37f491eb --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/test_query_db_joins_lookups_select.yml @@ -0,0 +1,48 @@ +Insert Account: + sf_object: Account + table: accounts + api: rest + fields: + - Name + - AccountNumber + +Insert Lead: + sf_object: Lead + table: leads + api: bulk + fields: + - LastName + +Select Contact: + sf_object: Contact + table: contacts + api: bulk + action: select + select_options: + strategy: similarity + priority_fields: + - FirstName + - AccountId + fields: + - FirstName + - LastName + lookups: + AccountId: + table: accounts + +Select Event: + sf_object: Event + table: events + api: rest + action: select + select_options: + strategy: similarity + priority_fields: + - WhoId + fields: + - Subject + lookups: + WhoId: + table: + - contacts + - leads diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index 26768d4ea1..4969722c6e 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -4,6 +4,7 @@ from cumulusci.tasks.bulkdata.select_utils import ( SelectOperationExecutor, SelectStrategy, + add_limit_offset_to_user_filter, annoy_post_process, calculate_levenshtein_distance, determine_field_types, @@ -21,7 +22,7 @@ def test_standard_generate_query_with_default_record_declaration(): limit = 5 offset = 2 query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], limit=limit, offset=offset + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset ) assert "WHERE" in query # Ensure WHERE clause is included 
@@ -36,7 +37,7 @@ def test_standard_generate_query_without_default_record_declaration(): limit = 3 offset = None query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], limit=limit, offset=offset + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset ) assert "WHERE" not in query # No WHERE clause should be present @@ -45,6 +46,23 @@ def test_standard_generate_query_without_default_record_declaration(): assert fields == ["Id"] +def test_standard_generate_query_with_user_filter(): + select_operator = SelectOperationExecutor(SelectStrategy.STANDARD) + sobject = "Contact" # Assuming no declaration for this object + limit = 3 + offset = None + user_filter = "WHERE Name IN ('Sample Contact')" + query, fields = select_operator.select_generate_query( + sobject=sobject, fields=[], user_filter=user_filter, limit=limit, offset=offset + ) + + assert "WHERE" in query + assert "Sample Contact" in query + assert "LIMIT" in query + assert "OFFSET" not in query + assert fields == ["Id"] + + # Test Cases for random generate query def test_random_generate_query_with_default_record_declaration(): select_operator = SelectOperationExecutor(SelectStrategy.RANDOM) @@ -52,7 +70,7 @@ def test_random_generate_query_with_default_record_declaration(): limit = 5 offset = 2 query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], limit=limit, offset=offset + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset ) assert "WHERE" in query # Ensure WHERE clause is included @@ -67,7 +85,7 @@ def test_random_generate_query_without_default_record_declaration(): limit = 3 offset = None query, fields = select_operator.select_generate_query( - sobject=sobject, fields=[], limit=limit, offset=offset + sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset ) assert "WHERE" not in query # No WHERE clause should be present @@ -83,7 +101,7 @@ def test_standard_post_process_with_records(): num_records = 3 sobject = "Contact" selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject + None, records, num_records, sobject, weights=[] ) assert error_message is None @@ -99,7 +117,7 @@ def test_standard_post_process_with_fewer_records(): num_records = 3 sobject = "Opportunity" selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject + None, records, num_records, sobject, weights=[] ) assert error_message is None @@ -116,7 +134,7 @@ def test_standard_post_process_with_no_records(): num_records = 2 sobject = "Lead" selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject + None, records, num_records, sobject, weights=[] ) assert selected_records == [] @@ -130,7 +148,7 @@ def test_random_post_process_with_records(): num_records = 3 sobject = "Contact" selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject + None, records, num_records, sobject, weights=[] ) assert error_message is None @@ -145,7 +163,7 @@ def test_random_post_process_with_no_records(): num_records = 2 sobject = "Lead" selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject + None, records, num_records, sobject, weights=[] ) assert selected_records == [] @@ -159,7 +177,7 @@ def test_similarity_generate_query_with_default_record_declaration(): limit = 5 offset = 2 query, fields = 
select_operator.select_generate_query( - sobject, ["Name"], limit, offset + sobject, ["Name"], [], limit, offset ) assert "WHERE" in query # Ensure WHERE clause is included @@ -174,7 +192,7 @@ def test_similarity_generate_query_without_default_record_declaration(): limit = 3 offset = None query, fields = select_operator.select_generate_query( - sobject, ["Name"], limit, offset + sobject, ["Name"], [], limit, offset ) assert "WHERE" not in query # No WHERE clause should be present @@ -183,6 +201,59 @@ def test_similarity_generate_query_without_default_record_declaration(): assert "OFFSET" not in query +def test_similarity_generate_query_with_nested_fields(): + select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY) + sobject = "Event" # Assuming no declaration for this object + limit = 3 + offset = None + fields = [ + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + ] + query, query_fields = select_operator.select_generate_query( + sobject, fields, [], limit, offset + ) + + assert "WHERE" not in query # No WHERE clause should be present + assert query_fields == [ + "Id", + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + ] + assert f"LIMIT {limit}" in query + assert "TYPEOF Who" in query + assert "WHEN Contact" in query + assert "WHEN Lead" in query + assert "OFFSET" not in query + + +def test_random_generate_query_with_user_filter(): + select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY) + sobject = "Contact" # Assuming no declaration for this object + limit = 3 + offset = None + user_filter = "WHERE Name IN ('Sample Contact')" + query, fields = select_operator.select_generate_query( + sobject=sobject, + fields=["Name"], + user_filter=user_filter, + limit=limit, + offset=offset, + ) + + assert "WHERE" in query + assert "Sample Contact" in query + assert "LIMIT" in query + assert "OFFSET" not in query + assert fields == ["Id", "Name"] + + def test_levenshtein_distance(): assert levenshtein_distance("kitten", "kitten") == 0 # Identical strings assert levenshtein_distance("kitten", "sitten") == 1 # One substitution @@ -284,7 +355,7 @@ def test_similarity_post_process_with_no_records(): num_records = 2 sobject = "Lead" selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject + None, records, num_records, sobject, weights=[1, 1, 1] ) assert selected_records == [] @@ -305,6 +376,34 @@ def test_calculate_levenshtein_distance_basic(): expected_distance ), "Basic distance calculation failed." + # Empty fields + record1 = ["hello", ""] + record2 = ["hullo", ""] + weights = [1.0, 1.0] + + # Expected distance based on simple Levenshtein distances + # Levenshtein("hello", "hullo") = 1, Levenshtein("", "") = 0 + expected_distance = (1 * 1.0 + 0 * 1.0) / 2 # Averaged over two fields + + result = calculate_levenshtein_distance(record1, record2, weights) + assert result == pytest.approx( + expected_distance + ), "Basic distance calculation with empty fields failed." 
+ + # Partial empty fields + record1 = ["hello", "world"] + record2 = ["hullo", ""] + weights = [1.0, 1.0] + + # Expected distance based on simple Levenshtein distances + # Levenshtein("hello", "hullo") = 1, Levenshtein("world", "") = 5 + expected_distance = (1 * 1.0 + 5 * 0.05 * 1.0) / 2 # Averaged over two fields + + result = calculate_levenshtein_distance(record1, record2, weights) + assert result == pytest.approx( + expected_distance + ), "Basic distance calculation with partial empty fields failed." + def test_calculate_levenshtein_distance_weighted(): record1 = ["cat", "dog"] @@ -320,6 +419,26 @@ def test_calculate_levenshtein_distance_weighted(): ), "Weighted distance calculation failed." +def test_calculate_levenshtein_distance_records_length_doesnt_match(): + record1 = ["cat", "dog", "cow"] + record2 = ["bat", "fog"] + weights = [2.0, 0.5] + + with pytest.raises(ValueError) as e: + calculate_levenshtein_distance(record1, record2, weights) + assert "Records must have the same number of fields." in str(e.value) + + +def test_calculate_levenshtein_distance_weights_length_doesnt_match(): + record1 = ["cat", "dog"] + record2 = ["bat", "fog"] + weights = [2.0, 0.5, 3.0] + + with pytest.raises(ValueError) as e: + calculate_levenshtein_distance(record1, record2, weights) + assert "Records must be same size as fields (weights)." in str(e.value) + + def test_replace_empty_strings_with_missing(): # Case 1: Normal case with some empty strings records = [ @@ -419,11 +538,11 @@ def test_mixed_types(): assert determine_field_types(df, weights) == expected_output -def test_vectorize_records_mixed_numerical_categorical(): +def test_vectorize_records_mixed_numerical_boolean_categorical(): # Test data with mixed types: numerical and categorical only - db_records = [["1.0", "apple"], ["2.0", "banana"]] - query_records = [["1.5", "apple"], ["2.5", "cherry"]] - weights = [1.0, 1.0] # Equal weights for numerical and categorical columns + db_records = [["1.0", "true", "apple"], ["2.0", "false", "banana"]] + query_records = [["1.5", "true", "apple"], ["2.5", "false", "cherry"]] + weights = [1.0, 1.0, 1.0] # Equal weights for numerical and categorical columns hash_features = 4 # Number of hashing vectorizer features for categorical columns final_db_vectors, final_query_vectors = vectorize_records( @@ -437,7 +556,7 @@ def test_vectorize_records_mixed_numerical_categorical(): ), "Query vectors row count mismatch" # Expected dimensions: numerical (1) + categorical hashed features (4) - expected_feature_count = 1 + hash_features + expected_feature_count = 2 + hash_features assert ( final_db_vectors.shape[1] == expected_feature_count ), "DB vectors column count mismatch" @@ -478,3 +597,59 @@ def test_single_record_match_annoy_post_process(): assert len(closest_records) == 2 assert closest_records[0]["id"] == "q1" assert error is None + + +@pytest.mark.parametrize( + "filter_clause, limit_clause, offset_clause, expected", + [ + # Test: No existing LIMIT/OFFSET and no new clauses + ("SELECT * FROM users", None, None, " SELECT * FROM users"), + # Test: Existing LIMIT and no new limit provided + ("SELECT * FROM users LIMIT 100", None, None, "SELECT * FROM users LIMIT 100"), + # Test: Existing OFFSET and no new offset provided + ("SELECT * FROM users OFFSET 20", None, None, "SELECT * FROM users OFFSET 20"), + # Test: Existing LIMIT/OFFSET and new clauses provided + ( + "SELECT * FROM users LIMIT 100 OFFSET 20", + 50, + 10, + "SELECT * FROM users LIMIT 50 OFFSET 30", + ), + # Test: Existing LIMIT, new limit larger 
than existing (should keep the smaller one) + ("SELECT * FROM users LIMIT 100", 150, None, "SELECT * FROM users LIMIT 100"), + # Test: New limit smaller than existing (should use the new one) + ("SELECT * FROM users LIMIT 100", 50, None, "SELECT * FROM users LIMIT 50"), + # Test: Existing OFFSET, adding a new offset (should sum the offsets) + ("SELECT * FROM users OFFSET 20", None, 30, "SELECT * FROM users OFFSET 50"), + # Test: Existing LIMIT/OFFSET and new values set to None + ( + "SELECT * FROM users LIMIT 100 OFFSET 20", + None, + None, + "SELECT * FROM users LIMIT 100 OFFSET 20", + ), + # Test: Removing existing LIMIT and adding a new one + ("SELECT * FROM users LIMIT 200", 50, None, "SELECT * FROM users LIMIT 50"), + # Test: Removing existing OFFSET and adding a new one + ("SELECT * FROM users OFFSET 40", None, 20, "SELECT * FROM users OFFSET 60"), + # Edge case: Filter clause with mixed cases + ( + "SELECT * FROM users LiMiT 100 oFfSeT 20", + 50, + 10, + "SELECT * FROM users LIMIT 50 OFFSET 30", + ), + # Test: Filter clause with trailing/leading spaces + ( + " SELECT * FROM users LIMIT 100 OFFSET 20 ", + 50, + 10, + "SELECT * FROM users LIMIT 50 OFFSET 30", + ), + ], +) +def test_add_limit_offset_to_user_filter( + filter_clause, limit_clause, offset_clause, expected +): + result = add_limit_offset_to_user_filter(filter_clause, limit_clause, offset_clause) + assert result.strip() == expected.strip() diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index da13a9a8eb..bd059b9bbf 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -1,14 +1,17 @@ import io import json +from itertools import tee from unittest import mock import pytest import responses -from cumulusci.core.exceptions import BulkDataException, SOQLQueryException +from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata.load import LoadData from cumulusci.tasks.bulkdata.select_utils import SelectStrategy from cumulusci.tasks.bulkdata.step import ( + HIGH_PRIORITY_VALUE, + LOW_PRIORITY_VALUE, BulkApiDmlOperation, BulkApiQueryOperation, BulkJobMixin, @@ -19,7 +22,10 @@ DataOperationType, RestApiDmlOperation, RestApiQueryOperation, + assign_weights, download_file, + extract_flattened_headers, + flatten_record, get_dml_operation, get_query_operation, ) @@ -546,6 +552,7 @@ def test_select_records_standard_strategy_success(self, download_mock): context=context, fields=["LastName"], selection_strategy=SelectStrategy.STANDARD, + content_type="JSON", ) # Mock Bulk API responses @@ -555,10 +562,7 @@ def test_select_records_standard_strategy_success(self, download_mock): step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] # Mock the downloaded CSV content with a single record - download_mock.return_value = io.StringIO( - """Id -003000000000001""" - ) + download_mock.return_value = io.StringIO('[{"Id":"003000000000001"}]') # Mock the _wait_for_job method to simulate a successful job step._wait_for_job = mock.Mock() @@ -607,7 +611,7 @@ def test_select_records_standard_strategy_failure__no_records(self, download_moc step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] # Mock the downloaded CSV content indicating no records found - download_mock.return_value = io.StringIO("""Records not found for this query""") + download_mock.return_value = io.StringIO("[]") # Mock the _wait_for_job method to simulate a successful job step._wait_for_job = mock.Mock() @@ -654,51 +658,34 @@ def 
test_select_records_user_selection_filter_success(self, download_mock): step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] # Mock the downloaded CSV content with a single record - download_mock.return_value = io.StringIO( - """Id -003000000000001 -003000000000002 -003000000000003""" + download_mock.return_value = io.StringIO('[{"Id":"003000000000001"}]') + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 ) - # Mock the query operation - with mock.patch( - "cumulusci.tasks.bulkdata.step.get_query_operation" - ) as query_operation_mock: - query_operation_mock.return_value = mock.Mock() - query_operation_mock.return_value.query = mock.Mock() - query_operation_mock.return_value.get_results = mock.Mock() - query_operation_mock.return_value.get_results.return_value = [ - ["003000000000001"] - ] - # Mock the _wait_for_job method to simulate a successful job - step._wait_for_job = mock.Mock() - step._wait_for_job.return_value = DataOperationJobResult( - DataOperationStatus.SUCCESS, [], 0, 0 - ) + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) - # Prepare input records - records = iter([["Test1"], ["Test2"], ["Test3"]]) + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() - # Execute the select_records operation - step.start() - step.select_records(records) - step.end() - - # Get the results and assert their properties - results = list(step.get_results()) - assert ( - len(results) == 3 - ) # Expect 3 results (matching the input records count) - # Assert that all results have the expected ID, success, and created values - assert ( - results.count( - DataOperationResult( - id="003000000000001", success=True, error="", created=False - ) + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False ) - == 3 ) + == 3 + ) @mock.patch("cumulusci.tasks.bulkdata.step.download_file") def test_select_records_user_selection_filter_order_success(self, download_mock): @@ -722,47 +709,29 @@ def test_select_records_user_selection_filter_order_success(self, download_mock) # Mock the downloaded CSV content with a single record download_mock.return_value = io.StringIO( - """Id -003000000000001 -003000000000002 -003000000000003""" + '[{"Id":"003000000000003"}, {"Id":"003000000000001"}, {"Id":"003000000000002"}]' + ) + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 ) - # Mock the query operation - with mock.patch( - "cumulusci.tasks.bulkdata.step.get_query_operation" - ) as query_operation_mock: - query_operation_mock.return_value = mock.Mock() - query_operation_mock.return_value.query = mock.Mock() - query_operation_mock.return_value.get_results = mock.Mock() - query_operation_mock.return_value.get_results.return_value = [ - ["003000000000003"], - ["003000000000001"], - ["003000000000002"], - ] - # Mock the _wait_for_job method to simulate a successful job - step._wait_for_job = mock.Mock() - step._wait_for_job.return_value = DataOperationJobResult( 
- DataOperationStatus.SUCCESS, [], 0, 0 - ) + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) - # Prepare input records - records = iter([["Test1"], ["Test2"], ["Test3"]]) + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() - # Execute the select_records operation - step.start() - step.select_records(records) - step.end() - - # Get the results and assert their properties - results = list(step.get_results()) - assert ( - len(results) == 3 - ) # Expect 3 results (matching the input records count) - # Assert that all results are in the order given by user query - assert results[0].id == "003000000000003" - assert results[1].id == "003000000000001" - assert results[2].id == "003000000000002" + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results are in the order given by user query + assert results[0].id == "003000000000003" + assert results[1].id == "003000000000001" + assert results[2].id == "003000000000002" @mock.patch("cumulusci.tasks.bulkdata.step.download_file") def test_select_records_user_selection_filter_failure(self, download_mock): @@ -785,29 +754,14 @@ def test_select_records_user_selection_filter_failure(self, download_mock): step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] # Mock the downloaded CSV content with a single record - download_mock.return_value = io.StringIO( - """Id -003000000000001 -003000000000002 -003000000000003""" - ) - # Mock the query operation - with mock.patch( - "cumulusci.tasks.bulkdata.step.get_query_operation" - ) as query_operation_mock: - query_operation_mock.return_value = mock.Mock() - query_operation_mock.return_value.query = mock.Mock() - query_operation_mock.return_value.query.side_effect = BulkDataException( - "MALFORMED QUERY" - ) - - # Prepare input records - records = iter([["Test1"], ["Test2"], ["Test3"]]) + download_mock.side_effect = BulkDataException("MALFORMED QUERY") + # Prepare input records + records = iter([["Test1"], ["Test2"], ["Test3"]]) - # Execute the select_records operation - step.start() - with pytest.raises(BulkDataException): - step.select_records(records) + # Execute the select_records operation + step.start() + with pytest.raises(BulkDataException): + step.select_records(records) @mock.patch("cumulusci.tasks.bulkdata.step.download_file") def test_select_records_similarity_strategy_success(self, download_mock): @@ -818,7 +772,7 @@ def test_select_records_similarity_strategy_success(self, download_mock): operation=DataOperationType.QUERY, api_options={"batch_size": 10, "update_key": "LastName"}, context=context, - fields=["Id", "Name", "Email"], + fields=["Name", "Email"], selection_strategy=SelectStrategy.SIMILARITY, ) @@ -830,10 +784,7 @@ def test_select_records_similarity_strategy_success(self, download_mock): # Mock the downloaded CSV content with a single record download_mock.return_value = io.StringIO( - """Id,Name,Email -003000000000001,Jawad,mjawadtp@example.com -003000000000002,Aditya,aditya@example.com -003000000000003,Tom,tom@example.com""" + """[{"Id":"003000000000001", "Name":"Jawad", "Email":"mjawadtp@example.com"}, {"Id":"003000000000002", "Name":"Aditya", "Email":"aditya@example.com"}, {"Id":"003000000000003", "Name":"Tom", "Email":"tom@example.com"}]""" ) # Mock the _wait_for_job method to simulate a successful job @@ -908,7 +859,7 @@ def 
test_select_records_similarity_strategy_failure__no_records( step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] # Mock the downloaded CSV content indicating no records found - download_mock.return_value = io.StringIO("""Records not found for this query""") + download_mock.return_value = io.StringIO("[]") # Mock the _wait_for_job method to simulate a successful job step._wait_for_job = mock.Mock() @@ -940,6 +891,214 @@ def test_select_records_similarity_strategy_failure__no_records( assert job_result.records_processed == 0 assert job_result.total_row_errors == 0 + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_parent_level_records__polymorphic( + self, download_mock + ): + mock_describe_calls() + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Event", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=[ + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + ], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + download_mock.return_value = io.StringIO( + """[ + {"Id": "003000000000001", "Subject": "Sample Event 1", "Who":{ "attributes": {"type": "Contact"}, "Name": "Sample Contact", "Email": "contact@example.com"}}, + { "Id": "003000000000002", "Subject": "Sample Event 2", "Who":{ "attributes": {"type": "Lead"}, "Name": "Sample Lead", "Company": "Salesforce"}} + ]""" + ) + + records = iter( + [ + ["Sample Event 1", "Sample Contact", "contact@example.com", "", ""], + ["Sample Event 2", "", "", "Sample Lead", "Salesforce"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_parent_level_records__non_polymorphic( + self, download_mock + ): + mock_describe_calls() + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=["Name", "Account.Name", "Account.AccountNumber"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + download_mock.return_value = io.StringIO( + """[ + {"Id": "003000000000001", "Name": "Sample Contact 1", "Account":{ "attributes": {"type": "Account"}, "Name": "Sample Account", "AccountNumber": 123456}}, + { "Id": "003000000000002", "Subject": "Sample Contact 2", "Account": null} + ]""" + ) + + records = iter( + [ + ["Sample Contact 3", "Sample Account", 
"123456"], + ["Sample Contact 4", "", ""], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy_priority_fields(self, download_mock): + mock_describe_calls() + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + step_1 = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=["Name", "Email", "Account.Name", "Account.AccountNumber"], + selection_strategy=SelectStrategy.SIMILARITY, + selection_priority_fields={"Name": "Name", "Email": "Email"}, + ) + + step_2 = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=["Name", "Email", "Account.Name", "Account.AccountNumber"], + selection_strategy=SelectStrategy.SIMILARITY, + selection_priority_fields={ + "Account.Name": "Account.Name", + "Account.AccountNumber": "Account.AccountNumber", + }, + ) + + # Mock Bulk API responses + step_1.bulk.endpoint = "https://test" + step_1.bulk.create_query_job.return_value = "JOB" + step_1.bulk.query.return_value = "BATCH" + step_1.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + step_2.bulk.endpoint = "https://test" + step_2.bulk.create_query_job.return_value = "JOB" + step_2.bulk.query.return_value = "BATCH" + step_2.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + sample_response = [ + { + "Id": "003000000000001", + "Name": "Bob The Builder", + "Email": "bob@yahoo.org", + "Account": { + "attributes": {"type": "Account"}, + "Name": "Jawad TP", + "AccountNumber": 567890, + }, + }, + { + "Id": "003000000000002", + "Name": "Tom Cruise", + "Email": "tom@exmaple.com", + "Account": { + "attributes": {"type": "Account"}, + "Name": "Aditya B", + "AccountNumber": 123456, + }, + }, + ] + + download_mock.side_effect = [ + io.StringIO(f"""{json.dumps(sample_response)}"""), + io.StringIO(f"""{json.dumps(sample_response)}"""), + ] + + records = iter( + [ + ["Bob The Builder", "bob@yahoo.org", "Aditya B", "123456"], + ] + ) + records_1, records_2 = tee(records) + step_1.start() + step_1.select_records(records_1) + step_1.end() + + step_2.start() + step_2.select_records(records_2) + step_2.end() + + # Get the results and assert their properties + results_1 = list(step_1.get_results()) + results_2 = list(step_2.get_results()) + assert ( + len(results_1) == 1 + ) # Expect 1 results (matching the input records count) + assert ( + len(results_2) == 1 + ) # Expect 1 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + # Prioritizes Name and Email + assert results_1[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + # Prioritizes Account.Name and Account.AccountNumber + assert results_2[0] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + def test_batch(self): context = mock.Mock() 
@@ -1344,10 +1503,101 @@ def test_select_records_standard_strategy_success(self): assert ( results.count( DataOperationResult( - id="003000000000001", success=True, error="", created=False + id="003000000000001", success=True, error="", created=False + ) + ) + == 3 + ) + + @responses.activate + def test_select_records_standard_strategy_success_pagination(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=task, + fields=["LastName"], + selection_strategy=SelectStrategy.STANDARD, + ) + + # Set up pagination: First call returns done=False, second call returns done=True + step.sf.restful = mock.Mock( + side_effect=[ + { + "records": [{"Id": "003000000000001"}, {"Id": "003000000000002"}], + "done": False, # Pagination in progress + "nextRecordsUrl": "/services/data/vXX.X/query/next-records", + }, + ] + ) + + step.sf.query_more = mock.Mock( + side_effect=[ + {"records": [{"Id": "003000000000003"}], "done": True} # Final page + ] + ) + + records = iter([["Test1"], ["Test2"], ["Test3"]]) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=False ) ) - == 3 + == 1 ) @responses.activate @@ -1448,28 +1698,10 @@ def test_select_records_user_selection_filter_success(self): ) results = { - "compositeResponse": [ - { - "body": { - "records": [ - {"Id": "003000000000001"}, - {"Id": "003000000000002"}, - {"Id": "003000000000003"}, - ] - }, - "referenceId": "select_query", - "httpStatusCode": 200, - }, - { - "body": { - "records": [ - {"Id": "003000000000001"}, - ] - }, - "referenceId": "user_query", - "httpStatusCode": 200, - }, - ] + "records": [ + {"Id": "003000000000001"}, + ], + "done": True, } step.sf.restful = mock.Mock() step.sf.restful.return_value = results @@ -1532,30 +1764,12 @@ def test_select_records_user_selection_filter_order_success(self): ) results = { - "compositeResponse": [ - { - "body": { - "records": [ - {"Id": "003000000000001"}, - {"Id": "003000000000002"}, - {"Id": "003000000000003"}, - ] - }, - "referenceId": "select_query", - "httpStatusCode": 200, - }, - { - "body": { - "records": [ - {"Id": "003000000000003"}, - {"Id": "003000000000001"}, - {"Id": "003000000000002"}, 
- ] - }, - "referenceId": "user_query", - "httpStatusCode": 200, - }, - ] + "records": [ + {"Id": "003000000000003"}, + {"Id": "003000000000001"}, + {"Id": "003000000000002"}, + ], + "done": True, } step.sf.restful = mock.Mock() step.sf.restful.return_value = results @@ -1612,38 +1826,12 @@ def test_select_records_user_selection_filter_failure(self): selection_filter="MALFORMED FILTER", # Applying malformed filter ) - results = { - "compositeResponse": [ - { - "body": { - "records": [ - {"Id": "003000000000001"}, - {"Id": "003000000000002"}, - {"Id": "003000000000003"}, - ] - }, - "referenceId": "select_query", - "httpStatusCode": 200, - }, - { - "body": [ - { - "message": "Error in MALFORMED FILTER", - "errorCode": "MALFORMED QUERY", - } - ], - "referenceId": "user_query", - "httpStatusCode": 400, - }, - ] - } step.sf.restful = mock.Mock() - step.sf.restful.return_value = results + step.sf.restful.side_effect = Exception("MALFORMED QUERY") records = iter([["Test1"], ["Test2"], ["Test3"]]) step.start() - with pytest.raises(SOQLQueryException) as e: + with pytest.raises(Exception): step.select_records(records) - assert "MALFORMED QUERY" in str(e.value) @responses.activate def test_select_records_similarity_strategy_success(self): @@ -1680,7 +1868,7 @@ def test_select_records_similarity_strategy_success(self): operation=DataOperationType.UPSERT, api_options={"batch_size": 10, "update_key": "LastName"}, context=task, - fields=["Id", "Name", "Email"], + fields=["Name", "Email"], selection_strategy=SelectStrategy.SIMILARITY, ) @@ -1812,6 +2000,318 @@ def test_select_records_similarity_strategy_failure__no_records(self): assert job_result.records_processed == 0 assert job_result.total_row_errors == 0 + @responses.activate + def test_select_records_similarity_strategy_parent_level_records__polymorphic(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Event", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=task, + fields=[ + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + ], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + step.sf.restful = mock.Mock( + side_effect=[ + { + "records": [ + { + "Id": "003000000000001", + "Subject": "Sample Event 1", + "Who": { + "attributes": {"type": "Contact"}, + "Name": "Sample Contact", + "Email": "contact@example.com", + }, + }, + { + "Id": "003000000000002", + "Subject": "Sample Event 2", + "Who": { + "attributes": {"type": "Lead"}, + "Name": "Sample Lead", + "Company": "Salesforce", + }, + }, + ], + "done": True, + }, + ] + ) + + records = iter( + [ + ["Sample Event 1", "Sample Contact", "contact@example.com", "", ""], + ["Sample Event 2", "", "", "Sample Lead", "Salesforce"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their 
properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @responses.activate + def test_select_records_similarity_strategy_parent_level_records__non_polymorphic( + self, + ): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=task, + fields=["Name", "Account.Name", "Account.AccountNumber"], + selection_strategy=SelectStrategy.SIMILARITY, + ) + + step.sf.restful = mock.Mock( + side_effect=[ + { + "records": [ + { + "Id": "003000000000001", + "Name": "Sample Contact 1", + "Account": { + "attributes": {"type": "Account"}, + "Name": "Sample Account", + "AccountNumber": 123456, + }, + }, + { + "Id": "003000000000002", + "Name": "Sample Contact 2", + "Account": None, + }, + ], + "done": True, + }, + ] + ) + + records = iter( + [ + ["Sample Contact 3", "Sample Account", "123456"], + ["Sample Contact 4", "", ""], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 2 # Expect 2 results (matching the input records count) + + # Assert that all results have the expected ID, success, and created values + assert results[0] == DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + assert results[1] == DataOperationResult( + id="003000000000002", success=True, error="", created=False + ) + + @responses.activate + def test_select_records_similarity_strategy_priority_fields(self): + mock_describe_calls() + task_1 = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task_1.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task_1._init_task() + + task_2 = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task_2.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task_2._init_task() + + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + ], + status=200, + ) + responses.add( + responses.POST, + url=f"https://example.com/services/data/v{CURRENT_SF_API_VERSION}/composite/sobjects", + json=[{"id": "003000000000003", "success": True}], + status=200, + ) + step_1 = 
RestApiDmlOperation(
+            sobject="Contact",
+            operation=DataOperationType.QUERY,
+            api_options={"batch_size": 10},
+            context=task_1,
+            fields=["Name", "Email", "Account.Name", "Account.AccountNumber"],
+            selection_strategy=SelectStrategy.SIMILARITY,
+            selection_priority_fields={"Name": "Name", "Email": "Email"},
+        )
+
+        step_2 = RestApiDmlOperation(
+            sobject="Contact",
+            operation=DataOperationType.QUERY,
+            api_options={"batch_size": 10},
+            context=task_2,
+            fields=["Name", "Email", "Account.Name", "Account.AccountNumber"],
+            selection_strategy=SelectStrategy.SIMILARITY,
+            selection_priority_fields={
+                "Account.Name": "Account.Name",
+                "Account.AccountNumber": "Account.AccountNumber",
+            },
+        )
+
+        sample_response = [
+            {
+                "records": [
+                    {
+                        "Id": "003000000000001",
+                        "Name": "Bob The Builder",
+                        "Email": "bob@yahoo.org",
+                        "Account": {
+                            "attributes": {"type": "Account"},
+                            "Name": "Jawad TP",
+                            "AccountNumber": 567890,
+                        },
+                    },
+                    {
+                        "Id": "003000000000002",
+                        "Name": "Tom Cruise",
+                        "Email": "tom@example.com",
+                        "Account": {
+                            "attributes": {"type": "Account"},
+                            "Name": "Aditya B",
+                            "AccountNumber": 123456,
+                        },
+                    },
+                ],
+                "done": True,
+            },
+        ]
+
+        step_1.sf.restful = mock.Mock(side_effect=sample_response)
+        step_2.sf.restful = mock.Mock(side_effect=sample_response)
+
+        records = iter(
+            [
+                ["Bob The Builder", "bob@yahoo.org", "Aditya B", "123456"],
+            ]
+        )
+        records_1, records_2 = tee(records)
+        step_1.start()
+        step_1.select_records(records_1)
+        step_1.end()
+
+        step_2.start()
+        step_2.select_records(records_2)
+        step_2.end()
+
+        # Get the results and assert their properties
+        results_1 = list(step_1.get_results())
+        results_2 = list(step_2.get_results())
+        assert (
+            len(results_1) == 1
+        )  # Expect 1 result (matching the input records count)
+        assert (
+            len(results_2) == 1
+        )  # Expect 1 result (matching the input records count)
+
+        # Assert that all results have the expected ID, success, and created values
+        # Prioritizes Name and Email
+        assert results_1[0] == DataOperationResult(
+            id="003000000000001", success=True, error="", created=False
+        )
+        # Prioritizes Account.Name and Account.AccountNumber
+        assert results_2[0] == DataOperationResult(
+            id="003000000000002", success=True, error="", created=False
+        )
+
     @responses.activate
     def test_insert_dml_operation__boolean_conversion(self):
         mock_describe_calls()
@@ -2301,6 +2801,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml):
             context=context,
             selection_strategy=SelectStrategy.SIMILARITY,
             selection_filter=None,
+            selection_priority_fields=None,
+            content_type=None,
         )

         op = get_dml_operation(
@@ -2324,6 +2826,8 @@ def test_get_dml_operation(self, rest_dml, bulk_dml):
             context=context,
             selection_strategy=SelectStrategy.SIMILARITY,
             selection_filter=None,
+            selection_priority_fields=None,
+            content_type=None,
         )

 @mock.patch("cumulusci.tasks.bulkdata.step.BulkApiDmlOperation")
@@ -2488,99 +2992,120 @@ def test_cleanup_date_strings__upsert_update(self, operation):
         }, json_out

-import pytest
-
-# def test_generate_user_filter_query_basic():
-#     """Tests basic query generation without existing LIMIT or OFFSET."""
-#     filter_clause = "WHERE Name = 'John'"
-#     sobject = "Account"
-#     fields = ["Id", "Name"]
-#     limit_clause = 10
-#     offset_clause = 5
-
-#     expected_query = (
-#         "SELECT Id, Name FROM Account WHERE Name = 'John' LIMIT 10 OFFSET 5"
-#     )
-#     assert (
-#         generate_user_filter_query(
-#             filter_clause, sobject, fields, limit_clause, offset_clause
-#         )
-#         == expected_query
-#     )
-
-
-# def 
test_generate_user_filter_query_existing_limit(): -# """Tests handling of existing LIMIT in the filter clause.""" -# filter_clause = "WHERE Name = 'John' LIMIT 20" -# sobject = "Contact" -# fields = ["Id", "FirstName"] -# limit_clause = 5 # Should override the existing LIMIT -# offset_clause = None - -# expected_query = "SELECT Id, FirstName FROM Contact WHERE Name = 'John' LIMIT 5" -# assert ( -# generate_user_filter_query( -# filter_clause, sobject, fields, limit_clause, offset_clause -# ) -# == expected_query -# ) - - -# def test_generate_user_filter_query_existing_offset(): -# """Tests handling of existing OFFSET in the filter clause.""" -# filter_clause = "WHERE Name = 'John' OFFSET 15" -# sobject = "Opportunity" -# fields = ["Id", "Name"] -# limit_clause = None -# offset_clause = 10 # Should add to the existing OFFSET - -# expected_query = "SELECT Id, Name FROM Opportunity WHERE Name = 'John' OFFSET 25" -# assert ( -# generate_user_filter_query( -# filter_clause, sobject, fields, limit_clause, offset_clause -# ) -# == expected_query -# ) - - -# def test_generate_user_filter_query_no_limit_or_offset(): -# """Tests when no limit or offset is provided or present in the filter.""" -# filter_clause = "WHERE Name = 'John' LIMIT 5 OFFSET 20" -# sobject = "Lead" -# fields = ["Id", "Name", "Email"] -# limit_clause = None -# offset_clause = None - -# expected_query = ( -# "SELECT Id, Name, Email FROM Lead WHERE Name = 'John' LIMIT 5 OFFSET 20" -# ) -# print( -# generate_user_filter_query( -# filter_clause, sobject, fields, limit_clause, offset_clause -# ) -# ) -# assert ( -# generate_user_filter_query( -# filter_clause, sobject, fields, limit_clause, offset_clause -# ) -# == expected_query -# ) - - -# def test_generate_user_filter_query_case_insensitivity(): -# """Tests case-insensitivity for LIMIT and OFFSET.""" -# filter_clause = "where name = 'John' offset 5 limit 20" -# sobject = "Task" -# fields = ["Id", "Subject"] -# limit_clause = 15 -# offset_clause = 20 - -# expected_query = ( -# "SELECT Id, Subject FROM Task where name = 'John' LIMIT 15 OFFSET 25" -# ) -# assert ( -# generate_user_filter_query( -# filter_clause, sobject, fields, limit_clause, offset_clause -# ) -# == expected_query -# ) +@pytest.mark.parametrize( + "query_fields, expected", + [ + # Test with simple field names + (["Id", "Name", "Email"], ["Id", "Name", "Email"]), + # Test with TYPEOF fields (polymorphic fields) + ( + [ + "Subject", + { + "Who": [ + {"Contact": ["Name", "Email"]}, + {"Lead": ["Name", "Company"]}, + ] + }, + ], + [ + "Subject", + "Who.Contact.Name", + "Who.Contact.Email", + "Who.Lead.Name", + "Who.Lead.Company", + ], + ), + # Test with mixed simple and TYPEOF fields + ( + ["Subject", {"Who": [{"Contact": ["Email"]}]}, "Account.Name"], + ["Subject", "Who.Contact.Email", "Account.Name"], + ), + # Test with an empty list + ([], []), + ], +) +def test_extract_flattened_headers(query_fields, expected): + result = extract_flattened_headers(query_fields) + assert result == expected + + +@pytest.mark.parametrize( + "record, headers, expected", + [ + # Test with simple field matching + ( + {"Id": "001", "Name": "John Doe", "Email": "john@example.com"}, + ["Id", "Name", "Email"], + ["001", "John Doe", "john@example.com"], + ), + # Test with lookup fields and missing values + ( + { + "Who": { + "attributes": {"type": "Contact"}, + "Name": "Jane Doe", + "Email": "johndoe@org.com", + "Number": 10, + } + }, + ["Who.Contact.Name", "Who.Contact.Email", "Who.Contact.Number"], + ["Jane Doe", "johndoe@org.com", "10"], + 
), + # Test with non-matching ref_obj type + ( + {"Who": {"attributes": {"type": "Contact"}, "Email": "jane@contact.com"}}, + ["Who.Lead.Email"], + [""], + ), + # Test with mixed fields and nested lookups + ( + { + "Who": {"attributes": {"type": "Lead"}, "Name": "John Doe"}, + "Email": "john@example.com", + }, + ["Who.Lead.Name", "Who.Lead.Company", "Email"], + ["John Doe", "", "john@example.com"], + ), + # Test with mixed fields and nested lookups + ( + { + "Who": {"attributes": {"type": "Lead"}, "Name": "John Doe"}, + "Email": "john@example.com", + }, + ["What.Account.Name"], + [""], + ), + # Test with empty record + ({}, ["Id", "Name"], ["", ""]), + ], +) +def test_flatten_record(record, headers, expected): + result = flatten_record(record, headers) + assert result == expected + + +@pytest.mark.parametrize( + "priority_fields, fields, expected", + [ + # Test with priority fields matching + ( + {"Id": "Id", "Name": "Name"}, + ["Id", "Name", "Email"], + [HIGH_PRIORITY_VALUE, HIGH_PRIORITY_VALUE, LOW_PRIORITY_VALUE], + ), + # Test with no priority fields provided + (None, ["Id", "Name", "Email"], [1, 1, 1]), + # Test with empty priority fields dictionary + ({}, ["Id", "Name", "Email"], [1, 1, 1]), + # Test with some fields not in priority_fields + ( + {"Id": "Id"}, + ["Id", "Name", "Email"], + [HIGH_PRIORITY_VALUE, LOW_PRIORITY_VALUE, LOW_PRIORITY_VALUE], + ), + ], +) +def test_assign_weights(priority_fields, fields, expected): + result = assign_weights(priority_fields, fields) + assert result == expected From 77e2fdcdc8bb0c068a5add61d193fee4b94af030 Mon Sep 17 00:00:00 2001 From: lakshmi2506 <141401869+lakshmi2506@users.noreply.github.com> Date: Mon, 14 Oct 2024 21:09:13 +0530 Subject: [PATCH 28/65] @W-16566993: Migrating From SFDX to SFCLI (#3829) [W-16566993](https://gus.lightning.force.com/a07EE00001zeux8YAA) Migrating from SFDX CLI to SF CLI --------- Co-authored-by: James Estevez --- cumulusci/cli/tests/test_org.py | 1 - cumulusci/core/config/scratch_org_config.py | 31 +++++++++---------- cumulusci/core/config/sfdx_org_config.py | 14 ++++----- .../config/tests/test_config_expensive.py | 18 +++++------ .../dependencies/tests/test_dependencies.py | 2 +- .../core/keychain/base_project_keychain.py | 8 ++--- cumulusci/core/sfdx.py | 14 ++++----- cumulusci/core/tests/test_sfdx.py | 9 +++--- cumulusci/cumulusci.yml | 8 ++--- cumulusci/tasks/command.py | 4 +-- cumulusci/tasks/connectedapp.py | 14 ++++----- cumulusci/tasks/dx_convert_from.py | 4 +-- cumulusci/tasks/salesforce/sourcetracking.py | 15 +++++---- .../tests/test_nonsourcetracking.py | 6 ++-- .../salesforce/tests/test_sourcetracking.py | 7 ++--- cumulusci/tasks/sfdx.py | 10 +++--- cumulusci/tasks/tests/test_command.py | 2 +- cumulusci/tasks/tests/test_connectedapp.py | 5 ++- cumulusci/tasks/tests/test_dx_convert_from.py | 2 +- cumulusci/tasks/tests/test_sfdx.py | 20 ++++++------ cumulusci/tasks/vlocity/tests/test_vlocity.py | 2 +- cumulusci/tasks/vlocity/vlocity.py | 4 +-- docs/env-var-reference.md | 3 +- docs/get-started.md | 12 +++---- docs/github-actions.md | 6 ++-- docs/headless.md | 2 +- docs/managed-2gp.md | 2 +- docs/scratch-orgs.md | 2 +- 28 files changed, 111 insertions(+), 116 deletions(-) diff --git a/cumulusci/cli/tests/test_org.py b/cumulusci/cli/tests/test_org.py index dfa7874c9e..c85f2e0507 100644 --- a/cumulusci/cli/tests/test_org.py +++ b/cumulusci/cli/tests/test_org.py @@ -730,7 +730,6 @@ def test_org_list(self, cli_tbl): ], title="Connected Orgs", ) - assert scratch_table_call in cli_tbl.call_args_list assert 
connected_table_call in cli_tbl.call_args_list runtime.keychain.cleanup_org_cache_dirs.assert_called_once() diff --git a/cumulusci/core/config/scratch_org_config.py b/cumulusci/core/config/scratch_org_config.py index edd0c18807..d79f7fcb66 100644 --- a/cumulusci/core/config/scratch_org_config.py +++ b/cumulusci/core/config/scratch_org_config.py @@ -61,7 +61,7 @@ def days_alive(self) -> Optional[int]: return delta.days + 1 def create_org(self) -> None: - """Uses sfdx force:org:create to create the org""" + """Uses sf org create scratch to create the org""" if not self.config_file: raise ScratchOrgException( f"Scratch org config {self.name} is missing a config_file" @@ -72,7 +72,7 @@ def create_org(self) -> None: args: List[str] = self._build_org_create_args() extra_args = os.environ.get("SFDX_ORG_CREATE_ARGS", "") p: sarge.Command = sfdx( - f"force:org:create --json {extra_args}", + f"org create scratch --json {extra_args}", args=args, username=None, log_note="Creating scratch org", @@ -139,33 +139,32 @@ def _build_org_create_args(self) -> List[str]: args = ["-f", self.config_file, "-w", "120"] devhub_username: Optional[str] = self._choose_devhub_username() if devhub_username: - args += ["--targetdevhubusername", devhub_username] + args += ["--target-dev-hub", devhub_username] if not self.namespaced: - args += ["-n"] + args += ["--no-namespace"] if self.noancestors: - args += ["--noancestors"] + args += ["--no-ancestors"] if self.days: - args += ["--durationdays", str(self.days)] + args += ["--duration-days", str(self.days)] if self.release: - args += [f"release={self.release}"] + args += [f"--release={self.release}"] if self.sfdx_alias: args += ["-a", self.sfdx_alias] with open(self.config_file, "r") as org_def: org_def_data = json.load(org_def) org_def_has_email = "adminEmail" in org_def_data if self.email_address and not org_def_has_email: - args += [f"adminEmail={self.email_address}"] + args += [f"--admin-email={self.email_address}"] if self.default: - args += ["-s"] - if instance := self.instance or os.environ.get("SFDX_SIGNUP_INSTANCE"): - args += [f"instance={instance}"] + args += ["--set-default"] + return args def _choose_devhub_username(self) -> Optional[str]: """Determine which devhub username to specify when calling sfdx, if any.""" # If a devhub was specified via `cci org scratch`, use it. # (This will return None if "devhub" isn't set in the org config, - # in which case sfdx will use its defaultdevhubusername.) + # in which case sf will use its target-dev-hub.) devhub_username = self.devhub if not devhub_username and self.keychain is not None: # Otherwise see if one is configured via the "devhub" service @@ -178,7 +177,7 @@ def _choose_devhub_username(self) -> Optional[str]: return devhub_username def generate_password(self) -> None: - """Generates an org password with: sfdx force:user:password:generate. + """Generates an org password with: sf org generate password. 
On a non-zero return code, set the password_failed in our config and log the output (stdout/stderr) from sfdx.""" @@ -187,7 +186,7 @@ def generate_password(self) -> None: return p: sarge.Command = sfdx( - "force:user:password:generate", + "org generate password", self.username, log_note="Generating scratch org user password", ) @@ -214,13 +213,13 @@ def can_delete(self) -> bool: return bool(self.date_created) def delete_org(self) -> None: - """Uses sfdx force:org:delete to delete the org""" + """Uses sf org delete scratch to delete the org""" if not self.created: self.logger.info("Skipping org deletion: the scratch org does not exist.") return p: sarge.Command = sfdx( - "force:org:delete -p", self.username, "Deleting scratch org" + "org delete scratch -p", self.username, "Deleting scratch org" ) sfdx_output: List[str] = list(p.stdout_text) + list(p.stderr_text) diff --git a/cumulusci/core/config/sfdx_org_config.py b/cumulusci/core/config/sfdx_org_config.py index c466778047..dcf79b0a1b 100644 --- a/cumulusci/core/config/sfdx_org_config.py +++ b/cumulusci/core/config/sfdx_org_config.py @@ -27,9 +27,9 @@ def sfdx_info(self): if not self.print_json: self.logger.info(f"Getting org info from Salesforce CLI for {username}") - # Call force:org:display and parse output to get instance_url and + # Call org display and parse output to get instance_url and # access_token - p = sfdx("force:org:display --json", self.username) + p = sfdx("org display --json", self.username) org_info = None stderr_list = [line.strip() for line in p.stderr_text] @@ -166,7 +166,7 @@ def get_access_token(self, **userfields): else: username = result[0]["Username"] - p = sfdx(f"force:org:display --targetusername={username} --json") + p = sfdx(f"org display --target-org={username} --json") if p.returncode: output = p.stdout_text.read() try: @@ -183,9 +183,9 @@ def get_access_token(self, **userfields): return info["result"]["accessToken"] def force_refresh_oauth_token(self): - # Call force:org:display and parse output to get instance_url and + # Call org display and parse output to get instance_url and # access_token - p = sfdx("force:org:open -r", self.username, log_note="Refreshing OAuth token") + p = sfdx("org open -r", self.username, log_note="Refreshing OAuth token") stdout_list = [line.strip() for line in p.stdout_text] @@ -198,7 +198,7 @@ def force_refresh_oauth_token(self): # Added a print json argument to check whether it is there or not def refresh_oauth_token(self, keychain, print_json=False): - """Use sfdx force:org:describe to refresh token instead of built in OAuth handling""" + """Use sfdx org display to refresh token instead of built in OAuth handling""" if hasattr(self, "_sfdx_info"): # Cache the sfdx_info for 1 hour to avoid unnecessary calls out to sfdx CLI delta = datetime.datetime.utcnow() - self._sfdx_info_date @@ -208,7 +208,7 @@ def refresh_oauth_token(self, keychain, print_json=False): # Force a token refresh self.force_refresh_oauth_token() self.print_json = print_json - # Get org info via sfdx force:org:display + # Get org info via sf org display self.sfdx_info # Get additional org info by querying API self._load_orginfo() diff --git a/cumulusci/core/config/tests/test_config_expensive.py b/cumulusci/core/config/tests/test_config_expensive.py index 7c3e879fca..5003d4eceb 100644 --- a/cumulusci/core/config/tests/test_config_expensive.py +++ b/cumulusci/core/config/tests/test_config_expensive.py @@ -376,7 +376,7 @@ def test_get_access_token(self, Command): with 
mock.patch("cumulusci.core.config.sfdx_org_config.sfdx", sfdx): access_token = config.get_access_token(alias="dadvisor") sfdx.assert_called_once_with( - "force:org:display --targetusername=whatever@example.com --json" + "org display --target-org=whatever@example.com --json" ) assert access_token == "the-token" @@ -792,7 +792,6 @@ def test_build_org_create_args(self, scratch_def_file): "noancestors": True, "sfdx_alias": "project__org", "default": True, - "instance": "NA01", "release": "previous", }, "test", @@ -804,18 +803,17 @@ def test_build_org_create_args(self, scratch_def_file): "tmp.json", "-w", "120", - "--targetdevhubusername", + "--target-dev-hub", "fake@fake.devhub", - "-n", - "--noancestors", - "--durationdays", + "--no-namespace", + "--no-ancestors", + "--duration-days", "1", - "release=previous", + "--release=previous", "-a", "project__org", - "adminEmail=test@example.com", - "-s", - "instance=NA01", + "--admin-email=test@example.com", + "--set-default", ] def test_build_org_create_args__email_in_scratch_def(self): diff --git a/cumulusci/core/dependencies/tests/test_dependencies.py b/cumulusci/core/dependencies/tests/test_dependencies.py index 6462e440f5..3c9a2b8f0b 100644 --- a/cumulusci/core/dependencies/tests/test_dependencies.py +++ b/cumulusci/core/dependencies/tests/test_dependencies.py @@ -861,7 +861,7 @@ def test_get_metadata_package_zip_builder__sfdx( context=mock.ANY, ) sfdx_mock.assert_called_once_with( - "force:source:convert", + "project convert source", args=["-d", mock.ANY, "-r", "force-app"], capture_output=True, check_return=True, diff --git a/cumulusci/core/keychain/base_project_keychain.py b/cumulusci/core/keychain/base_project_keychain.py index 561caeaec7..cb1f99a2f8 100644 --- a/cumulusci/core/keychain/base_project_keychain.py +++ b/cumulusci/core/keychain/base_project_keychain.py @@ -96,11 +96,7 @@ def set_default_org(self, name): org.config["default"] = True org.save() if org.created: - sfdx( - sarge.shell_format( - "force:config:set defaultusername={}", org.sfdx_alias - ) - ) + sfdx(sarge.shell_format("force config set target-org={}", org.sfdx_alias)) def unset_default_org(self): """unset the default orgs for tasks""" @@ -110,7 +106,7 @@ def unset_default_org(self): if org_config.default: del org_config.config["default"] org_config.save() - sfdx("force:config:set defaultusername=") + sfdx("config unset target-org=") # This implementation of get_default_org, set_default_org, and unset_default_org # is currently kept for backwards compatibility, but EncryptedFileProjectKeychain diff --git a/cumulusci/core/sfdx.py b/cumulusci/core/sfdx.py index d1c8fd01d7..3058fc80ac 100644 --- a/cumulusci/core/sfdx.py +++ b/cumulusci/core/sfdx.py @@ -35,17 +35,17 @@ def sfdx( Returns a `sarge` Command instance with returncode, stdout, stderr """ - command = f"sfdx {command}" + command = f"sf {command}" if args is not None: for arg in args: command += " " + shell_quote(arg) if username: - command += f" -u {shell_quote(username)}" + command += f" -o {shell_quote(username)}" if log_note: logger.info(f"{log_note} with command: {command}") # Avoid logging access token if access_token: - command += f" -u {shell_quote(access_token)}" + command += f" -o {shell_quote(access_token)}" env = env or {} p = sarge.Command( command, @@ -86,15 +86,15 @@ def shell_quote(s: str): def get_default_devhub_username(): p = sfdx( - "force:config:get defaultdevhubusername --json", + "config get target-dev-hub --json", log_note="Getting default Dev Hub username from sfdx", check_return=True, ) 
    result = json.load(p.stdout_text)
     if "result" not in result or "value" not in result["result"][0]:
         raise SfdxOrgException(
-            "No sfdx config found for defaultdevhubusername. "
-            "Please use the sfdx force:config:set to set the defaultdevhubusername and run again."
+            "No sf config found for target-dev-hub. "
+            "Please use sf config set to set the target-dev-hub and run again."
         )
     username = result["result"][0]["value"]
     return username
@@ -145,7 +145,7 @@ def convert_sfdx_source(
         if name:
             args += ["-n", name]
         sfdx(
-            "force:source:convert",
+            "project convert source",
             args=args,
             capture_output=True,
             check_return=True,
diff --git a/cumulusci/core/tests/test_sfdx.py b/cumulusci/core/tests/test_sfdx.py
index 0d6661284e..205996f296 100644
--- a/cumulusci/core/tests/test_sfdx.py
+++ b/cumulusci/core/tests/test_sfdx.py
@@ -23,14 +23,14 @@ class TestSfdx:
     def test_posix_quoting(self, Command):
         sfdx("cmd", args=["a'b"])
         cmd = Command.call_args[0][0]
-        assert cmd == r"sfdx cmd 'a'\''b'"
+        assert cmd == r"sf cmd 'a'\''b'"

     @mock.patch("platform.system", mock.Mock(return_value="Windows"))
     @mock.patch("sarge.Command")
     def test_windows_quoting(self, Command):
         sfdx("cmd", args=['a"b'], access_token="token")
         cmd = Command.call_args[0][0]
-        assert cmd == r'sfdx cmd "a\"b" -u token'
+        assert cmd == r'sf cmd "a\"b" -o token'

     @mock.patch("platform.system", mock.Mock(return_value="Windows"))
     def test_shell_quote__str_with_space(self):
@@ -93,7 +93,7 @@ def test_convert_sfdx():

     assert p is not None
     sfdx.assert_called_once_with(
-        "force:source:convert",
+        "project convert source",
         args=["-d", mock.ANY, "-r", path, "-n", "Test Package"],
         capture_output=True,
         check_return=True,
@@ -109,7 +109,7 @@ def test_convert_sfdx__cwd():

     assert p is not None
     sfdx.assert_called_once_with(
-        "force:source:convert",
+        "project convert source",
         args=["-d", mock.ANY, "-n", "Test Package"],
         capture_output=True,
         check_return=True,
diff --git a/cumulusci/cumulusci.yml b/cumulusci/cumulusci.yml
index 1a2354e28a..70194abb84 100644
--- a/cumulusci/cumulusci.yml
+++ b/cumulusci/cumulusci.yml
@@ -271,14 +271,14 @@ tasks:
             path: unpackaged/config/qa
         group: Salesforce Metadata
     dx:
-        description: Execute an arbitrary Salesforce DX command against an org. Use the 'command' option to specify the command, such as 'force:package:install'
+        description: Execute an arbitrary Salesforce DX command against an org. Use the 'command' option to specify the command, such as 'package install'
        class_path: cumulusci.tasks.sfdx.SFDXOrgTask
         group: Salesforce DX
     dx_convert_to:
         description: Converts src directory metadata format into sfdx format under force-app
         class_path: cumulusci.tasks.sfdx.SFDXBaseTask
         options:
-            command: "force:mdapi:convert -r src"
+            command: "project convert mdapi -r src"
         group: Salesforce DX
     dx_convert_from:
         description: Converts force-app directory in sfdx format into metadata format under src
@@ -290,13 +290,13 @@
         description: Uses sfdx to pull from a scratch org into the force-app directory
         class_path: cumulusci.tasks.sfdx.SFDXOrgTask
         options:
-            command: "force:source:pull"
+            command: "project retrieve start --ignore-conflicts"
         group: Salesforce DX
     dx_push:
         description: Uses sfdx to push the force-app directory metadata into a scratch org
         class_path: cumulusci.tasks.sfdx.SFDXOrgTask
         options:
-            command: "force:source:push"
+            command: "project deploy start --ignore-conflicts"
         group: Salesforce DX
     enable_einstein_prediction:
         description: Enable an Einstein Prediction Builder prediction.
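Taken together, the wrapper and task changes above mean that every `sfdx()` call site now shells out to the `sf` executable, with `-o` rather than `-u` for org targeting. A minimal sketch of driving the helper directly, based only on the wrapper shown above (the username `dev@example.com` and the log note are illustrative, not part of this changeset):

```python
from cumulusci.core.sfdx import sfdx

# Composes and runs: sf org display --json -o dev@example.com
# (the wrapper prefixes "sf ", shell-quotes any args, and appends -o <username>)
p = sfdx(
    "org display --json",
    username="dev@example.com",
    log_note="Getting org info",
)

# sfdx() returns a sarge.Command, so returncode, stdout_text, and
# stderr_text are available exactly as the call sites in this patch use them.
if p.returncode:
    print(p.stderr_text.read())
else:
    print(p.stdout_text.read())
```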
diff --git a/cumulusci/tasks/command.py b/cumulusci/tasks/command.py
index 2de71f8577..1935fc3e6f 100644
--- a/cumulusci/tasks/command.py
+++ b/cumulusci/tasks/command.py
@@ -146,7 +146,7 @@ class SalesforceCommand(Command):
     """Execute a Command with SF credentials provided on the environment.

     Provides:
-    * SF_INSTANCE_URL
+    * SF_ORG_INSTANCE_URL
     * SF_ACCESS_TOKEN
     """

@@ -158,7 +158,7 @@ def _update_credentials(self):
     def _get_env(self):
         env = super(SalesforceCommand, self)._get_env()
         env["SF_ACCESS_TOKEN"] = self.org_config.access_token
-        env["SF_INSTANCE_URL"] = self.org_config.instance_url
+        env["SF_ORG_INSTANCE_URL"] = self.org_config.instance_url
         return env

diff --git a/cumulusci/tasks/connectedapp.py b/cumulusci/tasks/connectedapp.py
index 2566e3b13a..706c8132fc 100644
--- a/cumulusci/tasks/connectedapp.py
+++ b/cumulusci/tasks/connectedapp.py
@@ -47,7 +47,7 @@ class CreateConnectedApp(SFDXBaseTask):
             "description": "The email address to associate with the connected app. Defaults to email address from the github service if configured."
         },
         "username": {
-            "description": "Create the connected app in a different org. Defaults to the defaultdevhubusername configured in sfdx.",
+            "description": "Create the connected app in a different org. Defaults to the target-dev-hub configured in sfdx.",
             "required": False,
         },
         "connect": {
@@ -63,7 +63,7 @@ class CreateConnectedApp(SFDXBaseTask):
     def _init_options(self, kwargs):
         self.client_id = None
         self.client_secret = None
-        kwargs["command"] = "force:mdapi:deploy --wait {}".format(self.deploy_wait)
+        kwargs["command"] = "project deploy start --wait {}".format(self.deploy_wait)
         super(CreateConnectedApp, self)._init_options(kwargs)

         # Validate label
@@ -91,7 +91,7 @@ def _set_default_username(self):
         self.logger.info("Getting username for the default devhub from sfdx")
         output = []
         self._run_command(
-            command="{} force:config:get defaultdevhubusername --json".format(SFDX_CLI),
+            command="{} config get target-dev-hub --json".format(SFDX_CLI),
             env=self._get_env(),
             output_handler=output.append,
         )
@@ -109,7 +109,7 @@ def _process_devhub_output(self, output):
         data = self._process_json_output(output)
         if "value" not in data["result"][0]:
             raise TaskOptionsError(
-                "No sfdx config found for defaultdevhubusername. Please use the sfdx force:config:set to set the defaultdevhubusername and run again"
+                "No sf config found for target-dev-hub. Please use sf config set to set the target-dev-hub and run again"
             )
         self.options["username"] = data["result"][0]["value"]

@@ -166,11 +166,11 @@ def _connect_service(self):

     def _get_command(self):
         command = super()._get_command()
-        # Default to sfdx defaultdevhubusername
+        # Default to sf target-dev-hub
         if "username" not in self.options:
             self._set_default_username()
-        command += " -u {}".format(self.options.get("username"))
-        command += " -d {}".format(self.tempdir)
+        command += " -o {}".format(self.options.get("username"))
+        command += " --metadata-dir {}".format(self.tempdir)
         return command

     def _run_task(self):
diff --git a/cumulusci/tasks/dx_convert_from.py b/cumulusci/tasks/dx_convert_from.py
index ff242ce91e..45d9f1bd0a 100644
--- a/cumulusci/tasks/dx_convert_from.py
+++ b/cumulusci/tasks/dx_convert_from.py
@@ -16,8 +16,8 @@ class DxConvertFrom(SFDXBaseTask):
     def _init_options(self, kwargs):
         super()._init_options(kwargs)
-        # append command -d option to sfdx} force:source:convert
-        self.options["command"] = f"force:source:convert -d {self.options['src_dir']}"
+        # append the -d option to sf project convert source
+        self.options["command"] = f"project convert source -d {self.options['src_dir']}"

     def _run_task(self):
         src_dir = Path(self.options["src_dir"])
diff --git a/cumulusci/tasks/salesforce/sourcetracking.py b/cumulusci/tasks/salesforce/sourcetracking.py
index 5dc9a83795..27567ddb20 100644
--- a/cumulusci/tasks/salesforce/sourcetracking.py
+++ b/cumulusci/tasks/salesforce/sourcetracking.py
@@ -150,7 +150,7 @@ def _reset_sfdx_snapshot(self):
             self.org_config, ScratchOrgConfig
         ):
             sfdx(
-                "force:source:tracking:reset",
+                "project reset tracking",
                 args=["-p"],
                 username=self.org_config.username,
                 capture_output=True,
@@ -229,7 +229,7 @@ def retrieve_components(
     ):
         """Retrieve specified components from an org into a target folder.

-        Retrieval is done using the sfdx force:source:retrieve command.
+        Retrieval is done using the sf project retrieve start command.

         Set `md_format` to True if retrieving into a folder with a package
         in metadata format.
In this case the folder will be temporarily @@ -240,7 +240,6 @@ def retrieve_components( target = os.path.realpath(target) profiles = [] - # If retrieve_complete_profile and project_config is None, raise error # This is because project_config is only required if retrieve_complete_profile is True if retrieve_complete_profile and project_config is None: @@ -274,7 +273,7 @@ def retrieve_components( {"packageDirectories": [{"path": "force-app", "default": True}]}, f ) sfdx( - "force:mdapi:convert", + "project convert mdapi", log_note="Converting to DX format", args=["-r", target, "-d", "force-app"], check_return=True, @@ -292,7 +291,7 @@ def retrieve_components( # Retrieve specified components in DX format p = sfdx( - "force:source:retrieve", + "project retrieve start", access_token=org_config.access_token, log_note="Retrieving components", args=[ @@ -302,10 +301,11 @@ def retrieve_components( os.path.join(package_xml_path, "package.xml"), "-w", "5", + "--ignore-conflicts", ], capture_output=capture_output, check_return=True, - env={"SFDX_INSTANCE_URL": org_config.instance_url}, + env={"SF_ORG_INSTANCE_URL": org_config.instance_url}, ) # Extract Profiles @@ -321,11 +321,10 @@ def retrieve_components( task_config=task_config, ) cls_retrieve_profile() - if md_format: # Convert back to metadata format sfdx( - "force:source:convert", + "project convert source", log_note="Converting back to metadata format", args=["-r", "force-app", "-d", target], capture_output=capture_output, diff --git a/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py b/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py index 2dc1c1142e..066c371943 100644 --- a/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py +++ b/cumulusci/tasks/salesforce/tests/test_nonsourcetracking.py @@ -215,9 +215,9 @@ def test_run_task(self, sfdx, create_task_fixture): assert "SharingRules: alpha" in messages assert "SharingRules: BusinessBrand" not in messages assert sfdx_calls == [ - "force:mdapi:convert", - "force:source:retrieve", - "force:source:convert", + "project convert mdapi", + "project retrieve start", + "project convert source", ] assert os.path.exists(os.path.join("src", "package.xml")) diff --git a/cumulusci/tasks/salesforce/tests/test_sourcetracking.py b/cumulusci/tasks/salesforce/tests/test_sourcetracking.py index 97583af20c..258d1a1e32 100644 --- a/cumulusci/tasks/salesforce/tests/test_sourcetracking.py +++ b/cumulusci/tasks/salesforce/tests/test_sourcetracking.py @@ -188,11 +188,10 @@ def test_run_task(self, sfdx, create_task_fixture): pathlib.Path, "is_dir", return_value=True ): task._run_task() - assert sfdx_calls == [ - "force:mdapi:convert", - "force:source:retrieve", - "force:source:convert", + "project convert mdapi", + "project retrieve start", + "project convert source", ] assert os.path.exists(os.path.join("src", "package.xml")) mock_retrieve_profile.assert_called() diff --git a/cumulusci/tasks/sfdx.py b/cumulusci/tasks/sfdx.py index 612c1376f8..717fc26570 100644 --- a/cumulusci/tasks/sfdx.py +++ b/cumulusci/tasks/sfdx.py @@ -17,7 +17,7 @@ from cumulusci.core.tasks import BaseSalesforceTask from cumulusci.tasks.command import Command -SFDX_CLI = "sfdx" +SFDX_CLI = "sf" class SFDXBaseTask(Command): @@ -47,20 +47,20 @@ def _get_command(self): command = super()._get_command() # For scratch orgs, just pass the username in the command line if isinstance(self.org_config, ScratchOrgConfig): - command += " -u {username}".format(username=self.org_config.username) + command += " -o 
{username}".format(username=self.org_config.username) return command def _get_env(self): env = super(SFDXOrgTask, self)._get_env() if not isinstance(self.org_config, ScratchOrgConfig): # For non-scratch keychain orgs, pass the access token via env var - env["SFDX_INSTANCE_URL"] = self.org_config.instance_url - env["SFDX_DEFAULTUSERNAME"] = self.org_config.access_token + env["SF_ORG_INSTANCE_URL"] = self.org_config.instance_url + env["SF_TARGET_ORG"] = self.org_config.access_token return env class SFDXJsonTask(SFDXOrgTask): - command = "force:mdapi:deploy --json" + command = "project deploy start --json" task_options = { "extra": {"description": "Append additional options to the command"} diff --git a/cumulusci/tasks/tests/test_command.py b/cumulusci/tasks/tests/test_command.py index bea9df4fb6..128353f08c 100644 --- a/cumulusci/tasks/tests/test_command.py +++ b/cumulusci/tasks/tests/test_command.py @@ -126,4 +126,4 @@ def test_get_env(self): task = SalesforceCommand(self.project_config, self.task_config, self.org_config) env = task._get_env() assert "SF_ACCESS_TOKEN" in env - assert "SF_INSTANCE_URL" in env + assert "SF_ORG_INSTANCE_URL" in env diff --git a/cumulusci/tasks/tests/test_connectedapp.py b/cumulusci/tasks/tests/test_connectedapp.py index 3f4c4666b8..81e6f765eb 100644 --- a/cumulusci/tasks/tests/test_connectedapp.py +++ b/cumulusci/tasks/tests/test_connectedapp.py @@ -97,7 +97,10 @@ def test_get_command(self, run_command_mock): ) task.tempdir = "asdf" command = task._get_command() - assert command == "sfdx force:mdapi:deploy --wait 5 -u username -d asdf" + assert ( + command + == "sf project deploy start --wait 5 -o username --metadata-dir asdf" + ) def test_process_json_output(self): """_process_json_output returns valid json""" diff --git a/cumulusci/tasks/tests/test_dx_convert_from.py b/cumulusci/tasks/tests/test_dx_convert_from.py index a966fc7c1b..5c7b3f35bb 100644 --- a/cumulusci/tasks/tests/test_dx_convert_from.py +++ b/cumulusci/tasks/tests/test_dx_convert_from.py @@ -51,7 +51,7 @@ def test_dx_convert_from(sarge, sarge_process, dx_convert_task): assert not src_dir.exists() sarge.Command.assert_called_once_with( - "sfdx force:source:convert -d src", + "sf project convert source -d src", cwd=".", env=ANY, shell=True, diff --git a/cumulusci/tasks/tests/test_sfdx.py b/cumulusci/tasks/tests/test_sfdx.py index b2e443e45b..513793cc08 100644 --- a/cumulusci/tasks/tests/test_sfdx.py +++ b/cumulusci/tasks/tests/test_sfdx.py @@ -37,7 +37,7 @@ def setup_method(self): def test_base_task(self): """The command is prefixed w/ sfdx""" - self.task_config.config["options"] = {"command": "force:org", "extra": "--help"} + self.task_config.config["options"] = {"command": "org", "extra": "--help"} task = SFDXBaseTask(self.project_config, self.task_config) try: @@ -45,8 +45,8 @@ def test_base_task(self): except CommandException: pass - assert task.options["command"] == "force:org" - assert task._get_command() == "sfdx force:org --help" + assert task.options["command"] == "org" + assert task._get_command() == "sf org --help" @patch("cumulusci.tasks.command.Command._run_task", MagicMock(return_value=None)) def test_keychain_org_creds(self): @@ -71,24 +71,26 @@ def refresh_oauth_token(keychain): task() org_config.refresh_oauth_token.assert_called_once() - assert "SFDX_INSTANCE_URL" in task._get_env() - assert "SFDX_DEFAULTUSERNAME" in task._get_env() - assert access_token in task._get_env()["SFDX_DEFAULTUSERNAME"] + print(task._get_env()) + assert "SF_ORG_INSTANCE_URL" in task._get_env() + assert 
"SF_TARGET_ORG" in task._get_env() + assert access_token in task._get_env()["SF_TARGET_ORG"] def test_scratch_org_username(self): """Scratch Org credentials are passed by -u flag""" - self.task_config.config["options"] = {"command": "force:org --help"} + self.task_config.config["options"] = {"command": "org --help"} org_config = ScratchOrgConfig({"username": "test@example.com"}, "test") task = SFDXOrgTask(self.project_config, self.task_config, org_config) - assert "-u test@example.com" in task._get_command() + assert "-o test@example.com" in task._get_command() class TestSFDXJsonTask: def test_get_command(self): task = create_task(SFDXJsonTask) command = task._get_command() - assert command == "sfdx force:mdapi:deploy --json" + print(command) + assert command == "sf project deploy start --json" def test_process_output(self): task = create_task(SFDXJsonTask) diff --git a/cumulusci/tasks/vlocity/tests/test_vlocity.py b/cumulusci/tasks/vlocity/tests/test_vlocity.py index 7fa45536ad..e30be94ab2 100644 --- a/cumulusci/tasks/vlocity/tests/test_vlocity.py +++ b/cumulusci/tasks/vlocity/tests/test_vlocity.py @@ -247,7 +247,7 @@ def test_deploy_omni_studio_site_settings( # The frequent error is: # # "name": "NoOrgFound", -# "action": "Run the \"sfdx force:auth\" commands with --setdefaultusername to connect to an org and set it as your default org.\nRun \"force:org:create\" with --setdefaultusername to create a scratch org and set it as your default org.\nRun \"sfdx force:config:set defaultusername=\" to set your default username." +# "action": "Run the \"sfdx force:auth\" commands with --target-org to connect to an org and set it as your default org.\nRun \"org create scratch\" with --target-org to create a scratch org and set it as your default org.\nRun \"sf config set target-org=\" to set your default username." # } diff --git a/cumulusci/tasks/vlocity/vlocity.py b/cumulusci/tasks/vlocity/vlocity.py index 2071a61c83..c1d0f68085 100644 --- a/cumulusci/tasks/vlocity/vlocity.py +++ b/cumulusci/tasks/vlocity/vlocity.py @@ -105,8 +105,8 @@ def _add_token_to_sfdx(self, access_token: str, instance_url: str) -> str: """ # TODO: Use the sf v2 form of this command instead (when we migrate) token_store_cmd = [ - "sfdx", - "force:auth:accesstoken:store", + "sf", + "org login access-token", "--no-prompt", "--alias", f"{VBT_SF_ALIAS}", diff --git a/docs/env-var-reference.md b/docs/env-var-reference.md index b9f5628e5c..cc01c8a798 100644 --- a/docs/env-var-reference.md +++ b/docs/env-var-reference.md @@ -69,5 +69,4 @@ org, e.g. a Dev Hub. Set with SFDX_CLIENT_ID. ## `SFDX_ORG_CREATE_ARGS` -Extra arguments passed to `sfdx force:org:create`. Can be used to pass -key-value pairs. +Extra arguments passed to `sf org create scratch`. diff --git a/docs/get-started.md b/docs/get-started.md index 545f22d57a..d3b702d581 100644 --- a/docs/get-started.md +++ b/docs/get-started.md @@ -180,12 +180,12 @@ To set up Salesforce DX: Org](https://developer.salesforce.com/docs/atlas.en-us.228.0.sfdx_dev.meta/sfdx_dev/sfdx_setup_enable_devhub.htm) 3. [Connect SFDX to Your Dev Hub Org](https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_auth_web_flow.htm) - - Be sure to use the `--setdefaultdevhubusername` option! + Be sure to use the `--set-default-dev-hub` option! -If you have the `sfdx` command installed, are connected to your Dev Hub, -and set the `defaultdevhubusername` config setting (use -`sfdx force:config:list` to verify), you're now ready to use `cci` with -`sfdx` to build scratch orgs. 
+If you have the `sf` command installed, are connected to your Dev Hub,
+and set the `target-dev-hub` config setting (use
+`sf config list` to verify), you're now ready to use `cci` with
+`sf` to build scratch orgs.

 ```{important}
 SFDX supports multiple Dev Hubs, so CumulusCI uses the one set as
@@ -464,7 +464,7 @@ package namespace matches the namespace you entered when running
 command to extract your package metadata.

 ```console
-$ sfdx force:source:retrieve -n package_name /path/to/project/
+$ sf project retrieve start -n package_name /path/to/project/
 ```

 That's it! You now have all of the metadata you care about in a single
diff --git a/docs/github-actions.md b/docs/github-actions.md
index f6950f6e70..08945a268d 100644
--- a/docs/github-actions.md
+++ b/docs/github-actions.md
@@ -181,15 +181,15 @@ The Cumulus Suite Actions **require CumulusCI 3.61.1 or greater** for any operat

 All Actions that interact with persistent orgs (such as a packaging org or Dev Hub) authorize those orgs using SFDX Auth URLs. These URLs are obtained by first authorizing an org to the CLI:

-`sfdx auth:web:login -a packaging`
+`sf org login web -a packaging`

 and then retrieving the auth URL from the JSON output of the command

-`sfdx force:org:display --json --verbose`
+`sf org display --json --verbose`

 under the key `sfdxAuthUrl` under `result`.

-If you have `jq` installed, you can do `sfdx force:org:display -u packaging-gh --json --verbose | jq -r .result.sfdxAuthUrl`.
+If you have `jq` installed, you can do `sf org display -o packaging-gh --json --verbose | jq -r .result.sfdxAuthUrl`.

 First-generation package projects will have two auth-URL secrets, for the packaging org and for the Dev Hub. Second-generation and Unlocked package projects will have at least one auth-URL secret, for the Dev Hub, and may have
diff --git a/docs/headless.md b/docs/headless.md
index ae43464008..78fb913152 100644
--- a/docs/headless.md
+++ b/docs/headless.md
@@ -121,7 +121,7 @@ and then use it directly from CumulusCI. To do so, follow these steps.

 1. Retrieve your auth URL.
-1. Authorize the org using `sfdx auth:sfdxurl:store`.
+1. Authorize the org using `sf org login sfdx-url`.
 1. Run `cci org import `.

 ### JWT Flow Authorization
diff --git a/docs/managed-2gp.md b/docs/managed-2gp.md
index 840de68506..83b77f70df 100644
--- a/docs/managed-2gp.md
+++ b/docs/managed-2gp.md
@@ -170,7 +170,7 @@ the GitHub release operations:
 $ cci task run promote_package_version --version_id 04t000000000000 --promote_dependencies True
 ```

-Alternatively, you can use the `sfdx force:package:version:promote`
+Alternatively, you can use the `sf package version promote`
 command to promote a 2GP package. Note that using this command will
 also not perform any release operations in GitHub.
diff --git a/docs/scratch-orgs.md b/docs/scratch-orgs.md
index 93e46f1203..83b5c90fa5 100644
--- a/docs/scratch-orgs.md
+++ b/docs/scratch-orgs.md
@@ -115,7 +115,7 @@ Scratch org limits are based on your Dev Hub's edition and your
 Salesforce contract. To review limits and consumption, run the command:

 ```console
-$ sfdx force:limits:api:display -u <username>
+$ sf org list limits --target-org <username>
 ```

 `<username>` is your Dev Hub username.
The limit names are From 95177add386f907011b39901a2e29891a9c228ce Mon Sep 17 00:00:00 2001 From: James Estevez Date: Mon, 11 Nov 2024 20:18:57 -0800 Subject: [PATCH 29/65] Fix sf config unset command --- cumulusci/core/keychain/base_project_keychain.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cumulusci/core/keychain/base_project_keychain.py b/cumulusci/core/keychain/base_project_keychain.py index cb1f99a2f8..0c1d0a6763 100644 --- a/cumulusci/core/keychain/base_project_keychain.py +++ b/cumulusci/core/keychain/base_project_keychain.py @@ -106,7 +106,7 @@ def unset_default_org(self): if org_config.default: del org_config.config["default"] org_config.save() - sfdx("config unset target-org=") + sfdx("config unset target-org") # This implementation of get_default_org, set_default_org, and unset_default_org # is currently kept for backwards compatibility, but EncryptedFileProjectKeychain From 4820442ede6f03e0128b4d1f8ff33445296dc1dd Mon Sep 17 00:00:00 2001 From: James Estevez Date: Mon, 11 Nov 2024 20:21:44 -0800 Subject: [PATCH 30/65] Remove dx_pull and dx_push tasks --- cumulusci/cumulusci.yml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/cumulusci/cumulusci.yml b/cumulusci/cumulusci.yml index 70194abb84..8259a6fa50 100644 --- a/cumulusci/cumulusci.yml +++ b/cumulusci/cumulusci.yml @@ -286,18 +286,6 @@ tasks: options: src_dir: src group: Salesforce DX - dx_pull: - description: Uses sfdx to pull from a scratch org into the force-app directory - class_path: cumulusci.tasks.sfdx.SFDXOrgTask - options: - command: "project retrieve start --ignore-conflicts" - group: Salesforce DX - dx_push: - description: Uses sfdx to push the force-app directory metadata into a scratch org - class_path: cumulusci.tasks.sfdx.SFDXOrgTask - options: - command: "project deploy start --ignore-conflicts" - group: Salesforce DX enable_einstein_prediction: description: Enable an Einstein Prediction Builder prediction. class_path: cumulusci.tasks.salesforce.enable_prediction.EnablePrediction From e0b5630b2f91e995c4fdfc98bff8fc3d6ef461c3 Mon Sep 17 00:00:00 2001 From: James Estevez Date: Tue, 12 Nov 2024 11:40:29 -0800 Subject: [PATCH 31/65] Remove deprecation warning --- cumulusci/utils/yaml/cumulusci_yml.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/cumulusci/utils/yaml/cumulusci_yml.py b/cumulusci/utils/yaml/cumulusci_yml.py index f8498ed9ea..93516ed654 100644 --- a/cumulusci/utils/yaml/cumulusci_yml.py +++ b/cumulusci/utils/yaml/cumulusci_yml.py @@ -278,9 +278,6 @@ class ErrorDict(TypedDict): type: str -has_shown_yaml_error_message = False - - def _log_yaml_errors(logger, errors: List[ErrorDict]): "Format and log a Pydantic-style error dictionary" global has_shown_yaml_error_message @@ -289,18 +286,6 @@ def _log_yaml_errors(logger, errors: List[ErrorDict]): for error in errors: loc = " -> ".join(repr(x) for x in error["loc"] if x != "__root__") logger.warning(" %s\n %s", loc, error["msg"]) - if not has_shown_yaml_error_message: - logger.error( - "NOTE: These warnings will become errors on Sept 30, 2022.\n\n" - "If you need to put non-standard data in your CumulusCI file " - "(for some form of project-specific setting), put it in " - "the `project: custom:` section of `cumulusci.yml` ." - ) - logger.error( - "If you think your YAML has no error, please report the bug to the CumulusCI team." 
- ) - logger.error("https://github.com/SFDO-Tooling/CumulusCI/issues/\n") - has_shown_yaml_error_message = True def cci_safe_load( From 0ecf5f43bebca7ac755badd5cd8533f259a4838a Mon Sep 17 00:00:00 2001 From: James Estevez Date: Tue, 12 Nov 2024 12:38:38 -0800 Subject: [PATCH 32/65] Release v4.0.0 --- cumulusci/__about__.py | 2 +- docs/history.md | 16 ++++++++++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/cumulusci/__about__.py b/cumulusci/__about__.py index 2eb95d97e3..ce1305bf4e 100644 --- a/cumulusci/__about__.py +++ b/cumulusci/__about__.py @@ -1 +1 @@ -__version__ = "3.93.0" +__version__ = "4.0.0" diff --git a/docs/history.md b/docs/history.md index 654b7e1d61..536b806184 100644 --- a/docs/history.md +++ b/docs/history.md @@ -2,6 +2,20 @@ +## v4.0.0 (2024-11-12) + +## What's Changed + +### Critical Changes 🛠 + +- Python versions 3.8, 3.9, and 3.10 are no longer supported. +- Switch to `sf` CLI commands [@lakshmi2506](https://github.com/lakshmi2506) in [#3829](https://github.com/SFDO-Tooling/CumulusCI/pull/3829) + - Removed the `dx_pull` and `dx_push` tasks as the underlying commands were removed from `sf` + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.92.0...v4.0.0 + + + ## v3.93.0 (2024-10-17) @@ -21,8 +35,6 @@ **Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.92.0...v3.93.0 - - ## v3.92.0 (2024-09-18) From c2c7caeb290c6c17025c2eb63b8d7f95f9bffb2b Mon Sep 17 00:00:00 2001 From: James Estevez Date: Tue, 12 Nov 2024 13:53:05 -0800 Subject: [PATCH 33/65] Update dependencies and drop python <3.11 Use pyproject.toml via uv for dependency management Freezing dependencies does so for a single platform (e.g. linux) and Python version (3.12). Because a dependency can include environment markers to specify transitive dependencies a requirements.txt file generated on one platform/version can fail to install on another. This commit modifies our feature test workflow to use pip's local project installs to provide the correct dependency resolutions for each platform and python version. - Adjusted the Python version matrix in GitHub workflows to include Python 3.11, 3.12, and 3.13, removing older versions. 
- Simplified the GitHub workflows to use uv --- .github/workflows/feature_test.yml | 55 +- .github/workflows/pre-release.yml | 4 +- .github/workflows/release.yml | 6 +- .github/workflows/release_test.yml | 14 +- .github/workflows/slow_integration_tests.yml | 16 +- .github/workflows/update_dependencies.yml | 2 +- .../tests/test_robotframework.py | 2 +- cumulusci/utils/tests/test_fileutils.py | 2 +- cumulusci/utils/yaml/model_parser.py | 4 +- pyproject.toml | 73 +- requirements.txt | 1 - requirements/dev.txt | 389 ---- requirements/prod.txt | 193 -- requirements_dev.txt | 3 - utility/pin_dependencies.py | 28 - uv.lock | 1965 +++++++++++++++++ 16 files changed, 2056 insertions(+), 701 deletions(-) delete mode 100644 requirements.txt delete mode 100644 requirements/dev.txt delete mode 100644 requirements/prod.txt delete mode 100644 requirements_dev.txt delete mode 100644 utility/pin_dependencies.py create mode 100644 uv.lock diff --git a/.github/workflows/feature_test.yml b/.github/workflows/feature_test.yml index 6520d5475e..31ea13dce8 100644 --- a/.github/workflows/feature_test.yml +++ b/.github/workflows/feature_test.yml @@ -22,17 +22,22 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 1 - - name: Set up Python 3.8 + - name: Set up Python 3.11 id: py uses: actions/setup-python@v4 with: - python-version: 3.8 - cache: pip - cache-dependency-path: "requirements/*.txt" + python-version: 3.11 + - name: Set up uv + uses: SFDO-Tooling/setup-uv@main + with: + version: "0.5.0" + enable-cache: true - name: Install dependencies - run: pip install -r requirements_dev.txt + run: uv sync --group docs - name: Build Docs - run: make docs + run: | + cd docs + uv run sphinx-build -b html . ./_build unit_tests: name: "Unit tests: ${{ matrix.os }}-${{ matrix.python-version }}" @@ -41,39 +46,39 @@ jobs: fail-fast: false matrix: os: [macos-latest, SFDO-Tooling-Ubuntu, SFDO-Tooling-Windows] - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] - exclude: - - os: macos-latest - python-version: 3.8 - include: - - os: macos-13 - python-version: 3.8 + python-version: ["3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: python-version: "${{ matrix.python-version }}" - cache: pip - cache-dependency-path: "requirements/*.txt" + - name: Set up uv + uses: SFDO-Tooling/setup-uv@main + with: + version: "0.5.0" + enable-cache: true - name: Install dependencies - run: pip install -r requirements_dev.txt + run: uv sync -p ${{ matrix.python-version }} - name: Run Pytest - run: pytest --cov-report= --cov=cumulusci + run: uv run pytest --cov-report= --cov=cumulusci robot_api: name: "Robot: No browser" runs-on: SFDO-Tooling-Ubuntu steps: - uses: actions/checkout@v4 - - name: Set up Python 3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.8 - cache: pip - cache-dependency-path: "requirements/*.txt" - - name: Install Python dependencies - run: pip install -r requirements_dev.txt + python-version: 3.11 + - name: Set up uv + uses: SFDO-Tooling/setup-uv@main + with: + version: "0.5.0" + enable-cache: true + - name: Install dependencies + run: uv sync -p 3.11 - name: Install sfdx run: | mkdir sfdx @@ -90,7 +95,7 @@ jobs: SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }} - name: Run robot tests run: | - coverage run --append $(which cci) task run robot \ + uv run coverage run --append $(which cci) task run robot \ --org dev \ -o name "CumulusCI" \ -o suites cumulusci/robotframework/tests \ @@ -98,7 +103,7 @@ jobs: - name: 
Delete scratch org if: always() run: | - cci org scratch_delete dev + uv run cci org scratch_delete dev - name: Store robot results if: failure() uses: actions/upload-artifact@v4 diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 3531240428..4f7c52d0aa 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -22,10 +22,10 @@ jobs: runs-on: SFDO-Tooling-Ubuntu steps: - uses: actions/checkout@main - - name: Set up Python 3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.11 cache: pip - name: Install build tool run: python -m pip install hatch diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0b5c66c0ba..8050a53038 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -15,15 +15,13 @@ jobs: runs-on: SFDO-Tooling-Ubuntu steps: - uses: actions/checkout@main - - name: Set up Python 3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.11 cache: pip - name: Install build tools run: python -m pip install hatch tomli tomli-w - - name: Pin dependencies - run: python utility/pin_dependencies.py - name: Build source tarball and binary wheel run: hatch build -c - name: Upload to PyPI diff --git a/.github/workflows/release_test.yml b/.github/workflows/release_test.yml index 49fd823218..e6d8b66ba9 100644 --- a/.github/workflows/release_test.yml +++ b/.github/workflows/release_test.yml @@ -37,12 +37,12 @@ jobs: runs-on: SFDO-Tooling-Ubuntu steps: - uses: actions/checkout@v3 - - name: Set up Python 3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.11 cache: pip - cache-dependency-path: "requirements/*.txt" + cache-dependency-path: "pyproject.toml" - name: Install build tools run: pip install hatch - name: Test source tarball and binary wheel @@ -71,14 +71,14 @@ jobs: concurrency: release steps: - uses: actions/checkout@v3 - - name: Set up Python 3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.11 cache: pip - cache-dependency-path: "requirements/*.txt" + cache-dependency-path: "pyproject.toml" - name: Install Python dependencies - run: pip install -r requirements_dev.txt + run: pip install .[test] - name: Install sfdx run: | mkdir sfdx diff --git a/.github/workflows/slow_integration_tests.yml b/.github/workflows/slow_integration_tests.yml index 9c06e222bc..0f75321ed6 100644 --- a/.github/workflows/slow_integration_tests.yml +++ b/.github/workflows/slow_integration_tests.yml @@ -25,16 +25,16 @@ jobs: runs-on: SFDO-Tooling-Ubuntu steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.11 cache: pip - cache-dependency-path: "requirements/*.txt" + cache-dependency-path: "pyproject.toml" - name: Install Python dependencies run: | python -m pip install -U pip - pip install -r requirements_dev.txt + pip install .[test] - name: Install sfdx run: | mkdir sfdx @@ -73,14 +73,14 @@ jobs: # org-shape: "prerelease" steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: 3.11 cache: pip - cache-dependency-path: "requirements/*.txt" + cache-dependency-path: "pyproject.toml" - name: Install Python dependencies - run: pip install -r 
requirements_dev.txt + run: pip install .[test] - name: Install sfdx run: | mkdir sfdx diff --git a/.github/workflows/update_dependencies.yml b/.github/workflows/update_dependencies.yml index 0e83d8400c..9cf9e0474c 100644 --- a/.github/workflows/update_dependencies.yml +++ b/.github/workflows/update_dependencies.yml @@ -7,4 +7,4 @@ jobs: update_python_dependencies: uses: SFDO-Tooling/.github/.github/workflows/update_python_dependencies.yml@main with: - python-version: 3.8 + python-version: 3.11 diff --git a/cumulusci/tasks/robotframework/tests/test_robotframework.py b/cumulusci/tasks/robotframework/tests/test_robotframework.py index 0a9a4e7a74..537001b888 100644 --- a/cumulusci/tasks/robotframework/tests/test_robotframework.py +++ b/cumulusci/tasks/robotframework/tests/test_robotframework.py @@ -762,7 +762,7 @@ def test_pageobject_docstring(self): class TestRobotPerformanceKeywords: - def setup(self): + def setup_method(self): self.datadir = os.path.dirname(__file__) @contextmanager diff --git a/cumulusci/utils/tests/test_fileutils.py b/cumulusci/utils/tests/test_fileutils.py index 7b09afd2dd..65b0899950 100644 --- a/cumulusci/utils/tests/test_fileutils.py +++ b/cumulusci/utils/tests/test_fileutils.py @@ -199,7 +199,7 @@ def test_resource_test_resource_doesnt_exist_pathlib_relpath(self): class TestFSResourceTempdir(_TestFSResourceShared): - def setup(self): + def setup_method(self): self.tempdir = TemporaryDirectory() self.file = Path(self.tempdir.name) / "testfile.txt" self.file.touch() diff --git a/cumulusci/utils/yaml/model_parser.py b/cumulusci/utils/yaml/model_parser.py index b165b8f409..249d914fbd 100644 --- a/cumulusci/utils/yaml/model_parser.py +++ b/cumulusci/utils/yaml/model_parser.py @@ -1,5 +1,5 @@ -from pathlib import Path, Sequence -from typing import IO, Union +from pathlib import Path +from typing import IO, Sequence, Union from pydantic import BaseModel, ValidationError from pydantic.error_wrappers import ErrorWrapper diff --git a/pyproject.toml b/pyproject.toml index b04f0b66c6..27d3ba95cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ name = "cumulusci" dynamic = ["readme", "version"] description = "Build and release tools for Salesforce developers" license = { text = "BSD 3-Clause License" } -requires-python = ">=3.8" +requires-python = ">=3.11" authors = [ { name = "Salesforce.org", email = "sfdo-mrbelvedere@salesforce.com" }, ] @@ -18,17 +18,14 @@ classifiers = [ "License :: OSI Approved :: BSD License", "Natural Language :: English", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] dependencies = [ - "click", + "click>=8.1", "cryptography", "python-dateutil", - "docutils<0.17", "Faker", "fs", "github3.py", @@ -36,7 +33,6 @@ dependencies = [ "keyring<=23.0.1", "defusedxml", "lxml", - "markdown-it-py==2.2.0", # resolve dependency conflict between prod/dev "MarkupSafe", "psutil", "pydantic<2", @@ -45,38 +41,46 @@ dependencies = [ "pyyaml", "requests", "requests-futures", - "rich", + "rich>=13.9.4", "robotframework", + "SQLAlchemy<2", "robotframework-pabot", "robotframework-requests", "robotframework-seleniumlibrary<6", - "rst2ansi", + "rst2ansi>=0.1.5", "salesforce-bulk", "sarge", "selenium<4", "simple-salesforce==1.11.4", - "snowfakery", - "SQLAlchemy<2", + "snowfakery>=4.0.0", "xmltodict", ] 
-[project.optional-dependencies]
-docs = ["myst-parser", "Sphinx"]
-lint = ["black", "flake8<4", "isort", "pre-commit"]
-test = [
-    "coverage[toml]",
-    "factory-boy",
-    "furo",
-    "jsonschema",
-    "pytest<7.1 ",  # https://github.com/pytest-dev/pytest/issues/9765
-    "pytest-cov",
-    "pytest-random-order",
-    "pytest-vcr",
-    "responses",
-    "testfixtures",
-    "tox",
-    "typeguard<=2.13.3",  # TODO: Lots of changes required for v4
-    "vcrpy"
+[dependency-groups]
+docs = [
+    "myst-parser>=1.0.0",
+    "sphinx>=5.3.0",
+]
+dev = [
+    "coverage[toml]>=7.6.1",
+    "factory-boy>=3.3.1",
+    "furo>=2023.3.27",
+    "jsonschema>=4.23.0",
+    "pytest>=7.0.1",
+    "pytest-cov>=5.0.0",
+    "pytest-random-order>=1.1.1",
+    "pytest-vcr>=1.0.2",
+    "responses>=0.23.1",
+    "testfixtures>=8.3.0",
+    "tox>=4.20.0",
+    "typeguard<=2.13.3",  # TODO: Lots of changes required for v4
+    "vcrpy>=6.0.2",
+]
+lint = [
+    "black>=24.8.0",
+    "flake8<4",
+    "isort>=5.13.2",
+    "pre-commit>=3.5.0",
 ]
 
 [project.scripts]
@@ -102,16 +106,13 @@ include = [
 include = [
     "/cumulusci",
     "/requirements/*", # Needed by tox
-    "README.md", # needed by hatch-fancy-pypi-readme
-    "docs/history.md"
+    "README.md",       # needed by hatch-fancy-pypi-readme
+    "docs/history.md", # ditto
 ]
 
 [tool.hatch.build.targets.wheel]
-exclude = [
-    "*.sql",
-    "*.zip"
-]
+exclude = ["*.sql", "*.zip"]
 
 [tool.hatch.metadata.hooks.fancy-pypi-readme]
 content-type = "text/markdown"
@@ -254,7 +255,7 @@ include = [
     'cumulusci/tests/util.py',
     'cumulusci/utils/waiting.py',
     'cumulusci/utils/xml/robot_xml.py',
-    'cumulusci/utils/ziputils.py'
+    'cumulusci/utils/ziputils.py',
 ]
 # Do not add to this list. Instead use
 # # pyright: strict
@@ -290,5 +291,5 @@ strict = [
     'cumulusci/tasks/release_notes/exceptions.py',
     'cumulusci/tasks/salesforce/BaseSalesforceTask.py',
     'cumulusci/tasks/vlocity/exceptions.py',
-    'cumulusci/utils/soql.py'
+    'cumulusci/utils/soql.py',
 ]
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 5eaadb8e90..0000000000
--- a/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
--r requirements/prod.txt
diff --git a/requirements/dev.txt b/requirements/dev.txt
deleted file mode 100644
index 9a7d8b1fac..0000000000
--- a/requirements/dev.txt
+++ /dev/null
@@ -1,389 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.8
-# by the following command:
-#
-#    pip-compile --all-extras --output-file=requirements/dev.txt pyproject.toml
-#
-alabaster==0.7.13
-    # via sphinx
-appdirs==1.4.4
-    # via fs
-attrs==24.2.0
-    # via
-    #   jsonschema
-    #   pytest
-    #   referencing
-authlib==1.3.2
-    # via simple-salesforce
-babel==2.16.0
-    # via sphinx
-beautifulsoup4==4.12.3
-    # via furo
-black==24.8.0
-    # via cumulusci (pyproject.toml)
-cachetools==5.5.0
-    # via tox
-certifi==2024.2.2
-    # via
-    #   requests
-    #   snowfakery
-cffi==1.17.1
-    # via cryptography
-cfgv==3.4.0
-    # via pre-commit
-chardet==5.2.0
-    # via tox
-charset-normalizer==3.3.2
-    # via
-    #   requests
-    #   snowfakery
-click==8.1.7
-    # via
-    #   black
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-colorama==0.4.6
-    # via tox
-coverage[toml]==7.6.1
-    # via
-    #   cumulusci (pyproject.toml)
-    #   pytest-cov
-cryptography==43.0.1
-    # via
-    #   authlib
-    #   cumulusci (pyproject.toml)
-    #   pyjwt
-    #   secretstorage
-defusedxml==0.7.1
-    # via cumulusci (pyproject.toml)
-distlib==0.3.8
-    # via virtualenv
-docutils==0.16
-    # via
-    #   cumulusci (pyproject.toml)
-    #   myst-parser
-    #   sphinx
-factory-boy==3.3.1
-    # via cumulusci (pyproject.toml)
-faker==24.4.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   factory-boy
-    #   faker-edu
-    #   faker-nonprofit
-    #   snowfakery
-faker-edu==1.1.0
-    # via snowfakery
-faker-nonprofit==1.0.0
-    # via snowfakery
-filelock==3.15.4
-    # via
-    #   tox
-    #   virtualenv
-flake8==3.9.2
-    # via cumulusci (pyproject.toml)
-fs==2.4.16
-    # via cumulusci (pyproject.toml)
-furo==2023.3.27
-    # via cumulusci (pyproject.toml)
-github3-py==4.0.1
-    # via cumulusci (pyproject.toml)
-greenlet==3.0.3
-    # via
-    #   snowfakery
-    #   sqlalchemy
-gvgen==1.0
-    # via snowfakery
-identify==2.6.0
-    # via pre-commit
-idna==3.6
-    # via
-    #   requests
-    #   snowfakery
-    #   yarl
-imagesize==1.4.1
-    # via sphinx
-importlib-metadata==8.4.0
-    # via
-    #   keyring
-    #   sphinx
-importlib-resources==6.4.4
-    # via
-    #   jsonschema
-    #   jsonschema-specifications
-iniconfig==2.0.0
-    # via pytest
-isort==5.13.2
-    # via cumulusci (pyproject.toml)
-jeepney==0.8.0
-    # via
-    #   keyring
-    #   secretstorage
-jinja2==3.1.3
-    # via
-    #   cumulusci (pyproject.toml)
-    #   myst-parser
-    #   snowfakery
-    #   sphinx
-jsonschema==4.23.0
-    # via cumulusci (pyproject.toml)
-jsonschema-specifications==2023.12.1
-    # via jsonschema
-keyring==23.0.1
-    # via cumulusci (pyproject.toml)
-lxml==5.3.0
-    # via cumulusci (pyproject.toml)
-markdown-it-py==2.2.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   mdit-py-plugins
-    #   myst-parser
-    #   rich
-markupsafe==2.1.5
-    # via
-    #   cumulusci (pyproject.toml)
-    #   jinja2
-    #   snowfakery
-mccabe==0.6.1
-    # via flake8
-mdit-py-plugins==0.3.5
-    # via myst-parser
-mdurl==0.1.2
-    # via markdown-it-py
-multidict==6.0.5
-    # via yarl
-mypy-extensions==1.0.0
-    # via black
-myst-parser==1.0.0
-    # via cumulusci (pyproject.toml)
-natsort==8.4.0
-    # via robotframework-pabot
-nodeenv==1.9.1
-    # via pre-commit
-packaging==24.1
-    # via
-    #   black
-    #   pyproject-api
-    #   pytest
-    #   sphinx
-    #   tox
-pathspec==0.12.1
-    # via black
-pkgutil-resolve-name==1.3.10
-    # via jsonschema
-platformdirs==4.2.2
-    # via
-    #   black
-    #   tox
-    #   virtualenv
-pluggy==1.5.0
-    # via
-    #   pytest
-    #   tox
-pre-commit==3.5.0
-    # via cumulusci (pyproject.toml)
-psutil==6.0.0
-    # via cumulusci (pyproject.toml)
-py==1.11.0
-    # via pytest
-pycodestyle==2.7.0
-    # via flake8
-pycparser==2.22
-    # via cffi
-pydantic==1.10.14
-    # via
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-pyflakes==2.3.1
-    # via flake8
-pygments==2.18.0
-    # via
-    #   furo
-    #   rich
-    #   sphinx
-pyjwt[crypto]==2.9.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   github3-py
-pyproject-api==1.7.1
-    # via tox
-pytest==7.0.1
-    # via
-    #   cumulusci (pyproject.toml)
-    #   pytest-cov
-    #   pytest-random-order
-    #   pytest-vcr
-pytest-cov==5.0.0
-    # via cumulusci (pyproject.toml)
-pytest-random-order==1.1.1
-    # via cumulusci (pyproject.toml)
-pytest-vcr==1.0.2
-    # via cumulusci (pyproject.toml)
-python-baseconv==1.2.2
-    # via snowfakery
-python-dateutil==2.9.0.post0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   faker
-    #   github3-py
-    #   snowfakery
-pytz==2024.1
-    # via
-    #   babel
-    #   cumulusci (pyproject.toml)
-pyyaml==6.0.1
-    # via
-    #   cumulusci (pyproject.toml)
-    #   myst-parser
-    #   pre-commit
-    #   responses
-    #   snowfakery
-    #   vcrpy
-referencing==0.35.1
-    # via
-    #   jsonschema
-    #   jsonschema-specifications
-requests==2.29.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   github3-py
-    #   requests-futures
-    #   responses
-    #   robotframework-requests
-    #   salesforce-bulk
-    #   simple-salesforce
-    #   snowfakery
-    #   sphinx
-requests-futures==1.0.1
-    # via cumulusci (pyproject.toml)
-responses==0.23.1
-    # via cumulusci (pyproject.toml)
-rich==13.8.0
-    # via cumulusci (pyproject.toml)
-robotframework==7.0.1
-    # via
-    #   cumulusci (pyproject.toml)
-    #   robotframework-pabot
-    #   robotframework-requests
-    #   robotframework-seleniumlibrary
-    #   robotframework-stacktrace
-robotframework-pabot==2.18.0
-    # via cumulusci (pyproject.toml)
-robotframework-pythonlibcore==4.4.1
-    # via robotframework-seleniumlibrary
-robotframework-requests==0.9.7
-    # via cumulusci (pyproject.toml)
-robotframework-seleniumlibrary==5.1.3
-    # via cumulusci (pyproject.toml)
-robotframework-stacktrace==0.4.1
-    # via robotframework-pabot
-rpds-py==0.20.0
-    # via
-    #   jsonschema
-    #   referencing
-rst2ansi==0.1.5
-    # via cumulusci (pyproject.toml)
-salesforce-bulk==2.2.0
-    # via cumulusci (pyproject.toml)
-sarge==0.1.7.post1
-    # via cumulusci (pyproject.toml)
-secretstorage==3.3.3
-    # via keyring
-selenium==3.141.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   robotframework-seleniumlibrary
-simple-salesforce==1.11.4
-    # via
-    #   cumulusci (pyproject.toml)
-    #   salesforce-bulk
-six==1.16.0
-    # via
-    #   fs
-    #   python-dateutil
-    #   salesforce-bulk
-    #   snowfakery
-snowballstemmer==2.2.0
-    # via sphinx
-snowfakery==3.6.2
-    # via cumulusci (pyproject.toml)
-soupsieve==2.6
-    # via beautifulsoup4
-sphinx==5.3.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   furo
-    #   myst-parser
-    #   sphinx-basic-ng
-sphinx-basic-ng==1.0.0b2
-    # via furo
-sphinxcontrib-applehelp==1.0.4
-    # via sphinx
-sphinxcontrib-devhelp==1.0.2
-    # via sphinx
-sphinxcontrib-htmlhelp==2.0.1
-    # via sphinx
-sphinxcontrib-jsmath==1.0.1
-    # via sphinx
-sphinxcontrib-qthelp==1.0.3
-    # via sphinx
-sphinxcontrib-serializinghtml==1.1.5
-    # via sphinx
-sqlalchemy==1.4.52
-    # via
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-testfixtures==8.3.0
-    # via cumulusci (pyproject.toml)
-tomli==2.0.1
-    # via
-    #   black
-    #   coverage
-    #   pyproject-api
-    #   pytest
-    #   tox
-tox==4.18.0
-    # via cumulusci (pyproject.toml)
-typeguard==2.13.3
-    # via cumulusci (pyproject.toml)
-types-pyyaml==6.0.12.20240808
-    # via responses
-typing-extensions==4.10.0
-    # via
-    #   black
-    #   faker
-    #   pydantic
-    #   rich
-    #   snowfakery
-unicodecsv==0.14.1
-    # via salesforce-bulk
-uritemplate==4.1.1
-    # via github3-py
-urllib3==1.26.18
-    # via
-    #   requests
-    #   responses
-    #   selenium
-    #   snowfakery
-    #   vcrpy
-vcrpy==6.0.1
-    # via
-    #   cumulusci (pyproject.toml)
-    #   pytest-vcr
-virtualenv==20.26.3
-    # via
-    #   pre-commit
-    #   tox
-wrapt==1.16.0
-    # via vcrpy
-xmltodict==0.13.0
-    # via cumulusci (pyproject.toml)
-yarl==1.9.11
-    # via vcrpy
-zipp==3.20.1
-    # via
-    #   importlib-metadata
-    #   importlib-resources
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements/prod.txt b/requirements/prod.txt
deleted file mode 100644
index 40ae1621a3..0000000000
--- a/requirements/prod.txt
+++ /dev/null
@@ -1,193 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.8
-# by the following command:
-#
-#    pip-compile --output-file=requirements/prod.txt pyproject.toml
-#
-appdirs==1.4.4
-    # via fs
-authlib==1.3.2
-    # via simple-salesforce
-certifi==2024.2.2
-    # via
-    #   requests
-    #   snowfakery
-cffi==1.17.1
-    # via cryptography
-charset-normalizer==3.3.2
-    # via
-    #   requests
-    #   snowfakery
-click==8.1.7
-    # via
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-cryptography==43.0.1
-    # via
-    #   authlib
-    #   cumulusci (pyproject.toml)
-    #   pyjwt
-    #   secretstorage
-defusedxml==0.7.1
-    # via cumulusci (pyproject.toml)
-docutils==0.16
-    # via cumulusci (pyproject.toml)
-faker==24.4.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   faker-edu
-    #   faker-nonprofit
-    #   snowfakery
-faker-edu==1.1.0
-    # via snowfakery
-faker-nonprofit==1.0.0
-    # via snowfakery
-fs==2.4.16
-    # via cumulusci (pyproject.toml)
-github3-py==4.0.1
-    # via cumulusci (pyproject.toml)
-greenlet==3.0.3
-    # via
-    #   snowfakery
-    #   sqlalchemy
-gvgen==1.0
-    # via snowfakery
-idna==3.6
-    # via
-    #   requests
-    #   snowfakery
-importlib-metadata==8.4.0
-    # via keyring
-jeepney==0.8.0
-    # via
-    #   keyring
-    #   secretstorage
-jinja2==3.1.3
-    # via
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-keyring==23.0.1
-    # via cumulusci (pyproject.toml)
-lxml==5.3.0
-    # via cumulusci (pyproject.toml)
-markdown-it-py==2.2.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   rich
-markupsafe==2.1.5
-    # via
-    #   cumulusci (pyproject.toml)
-    #   jinja2
-    #   snowfakery
-mdurl==0.1.2
-    # via markdown-it-py
-natsort==8.4.0
-    # via robotframework-pabot
-psutil==6.0.0
-    # via cumulusci (pyproject.toml)
-pycparser==2.22
-    # via cffi
-pydantic==1.10.14
-    # via
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-pygments==2.18.0
-    # via rich
-pyjwt[crypto]==2.9.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   github3-py
-python-baseconv==1.2.2
-    # via snowfakery
-python-dateutil==2.9.0.post0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   faker
-    #   github3-py
-    #   snowfakery
-pytz==2024.1
-    # via cumulusci (pyproject.toml)
-pyyaml==6.0.1
-    # via
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-requests==2.29.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   github3-py
-    #   requests-futures
-    #   robotframework-requests
-    #   salesforce-bulk
-    #   simple-salesforce
-    #   snowfakery
-requests-futures==1.0.1
-    # via cumulusci (pyproject.toml)
-rich==13.8.0
-    # via cumulusci (pyproject.toml)
-robotframework==7.0.1
-    # via
-    #   cumulusci (pyproject.toml)
-    #   robotframework-pabot
-    #   robotframework-requests
-    #   robotframework-seleniumlibrary
-    #   robotframework-stacktrace
-robotframework-pabot==2.18.0
-    # via cumulusci (pyproject.toml)
-robotframework-pythonlibcore==4.4.1
-    # via robotframework-seleniumlibrary
-robotframework-requests==0.9.7
-    # via cumulusci (pyproject.toml)
-robotframework-seleniumlibrary==5.1.3
-    # via cumulusci (pyproject.toml)
-robotframework-stacktrace==0.4.1
-    # via robotframework-pabot
-rst2ansi==0.1.5
-    # via cumulusci (pyproject.toml)
-salesforce-bulk==2.2.0
-    # via cumulusci (pyproject.toml)
-sarge==0.1.7.post1
-    # via cumulusci (pyproject.toml)
-secretstorage==3.3.3
-    # via keyring
-selenium==3.141.0
-    # via
-    #   cumulusci (pyproject.toml)
-    #   robotframework-seleniumlibrary
-simple-salesforce==1.11.4
-    # via
-    #   cumulusci (pyproject.toml)
-    #   salesforce-bulk
-six==1.16.0
-    # via
-    #   fs
-    #   python-dateutil
-    #   salesforce-bulk
-    #   snowfakery
-snowfakery==3.6.2
-    # via cumulusci (pyproject.toml)
-sqlalchemy==1.4.52
-    # via
-    #   cumulusci (pyproject.toml)
-    #   snowfakery
-typing-extensions==4.10.0
-    # via
-    #   faker
-    #   pydantic
-    #   rich
-    #   snowfakery
-unicodecsv==0.14.1
-    # via salesforce-bulk
-uritemplate==4.1.1
-    # via github3-py
-urllib3==1.26.18
-    # via
-    #   requests
-    #   selenium
-    #   snowfakery
-xmltodict==0.13.0
-    # via cumulusci (pyproject.toml)
-zipp==3.20.1
-    # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements_dev.txt b/requirements_dev.txt
deleted file mode 100644
index 45745ac762..0000000000
--- a/requirements_dev.txt
+++ /dev/null
@@ -1,3 +0,0 @@
--r requirements/prod.txt
--r requirements/dev.txt
--e .
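With the pinned requirements files above removed, the docs/dev/lint requirements now live in the `[dependency-groups]` table (PEP 735), which is a top-level table in pyproject.toml rather than part of `[project]`, so the groups never appear in the published package metadata. A minimal sketch of listing the groups with the stdlib `tomllib` parser (added in Python 3.11, the new `requires-python` floor); the script is illustrative, not part of this patch:

    import tomllib

    with open("pyproject.toml", "rb") as f:
        pyproject = tomllib.load(f)

    # [dependency-groups] sits outside [project], unlike the old
    # [project.optional-dependencies] extras it replaces here.
    for group, requirements in pyproject["dependency-groups"].items():
        print(f"{group}: {len(requirements)} requirements")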
diff --git a/utility/pin_dependencies.py b/utility/pin_dependencies.py
deleted file mode 100644
index eeb6c15c23..0000000000
--- a/utility/pin_dependencies.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import re
-from pathlib import Path
-
-import tomli
-import tomli_w
-
-
-def main(toml_filename: Path, requirements_txt: Path):
-    with open(toml_filename, "rb") as f:
-        data = tomli.load(f)
-
-    with open(requirements_txt) as f:
-        requirements = re.findall(r".*==.*", f.read())
-
-    pin_dependencies(data, requirements)
-
-    with open(toml_filename, "wb") as f:
-        tomli_w.dump(data, f)
-
-
-def pin_dependencies(data: dict, requirements: str):
-    data["project"]["dependencies"] = requirements
-
-
-root = Path(__file__).parent.parent
-requirements = root / "requirements"
-main(root / "pyproject.toml", requirements / "prod.txt")
-print("Updated ", root / "pyproject.toml")
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000000..3e3ba17d5b
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,1965 @@
+version = 1
+requires-python = ">=3.11"
+resolution-markers = [
+    "platform_python_implementation == 'PyPy'",
+    "platform_python_implementation != 'PyPy'",
+]
+
+[[package]]
+name = "alabaster"
+version = "0.7.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/71/a8ee96d1fd95ca04a0d2e2d9c4081dac4c2d2b12f7ddb899c8cb9bfd1532/alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2", size = 11454 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/64/88/c7083fc61120ab661c5d0b82cb77079fc1429d3f913a456c1c82cf4658f7/alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3", size = 13857 },
+]
+
+[[package]]
+name = "appdirs"
+version = "1.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566 },
+]
+
+[[package]]
+name = "attrs"
+version = "24.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 },
+]
+
+[[package]]
+name = "authlib"
+version = "1.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "cryptography" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f3/75/47dbab150ef6f9298e227a40c93c7fed5f3ffb67c9fb62cd49f66285e46e/authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2", size = 147313 }
+wheels = [
+    { url =
"https://files.pythonhosted.org/packages/df/4c/9aa0416a403d5cc80292cb030bcd2c918cce2755e314d8c1aa18656e1e12/Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc", size = 225111 }, +] + +[[package]] +name = "babel" +version = "2.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 }, +] + +[[package]] +name = "black" +version = "24.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/b0/46fb0d4e00372f4a86a6f8efa3cb193c9f64863615e39010b1477e010578/black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f", size = 644810 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/a6/0a3aa89de9c283556146dc6dbda20cd63a9c94160a6fbdebaf0918e4a3e1/black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1", size = 1615080 }, + { url = "https://files.pythonhosted.org/packages/db/94/b803d810e14588bb297e565821a947c108390a079e21dbdcb9ab6956cd7a/black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af", size = 1438143 }, + { url = "https://files.pythonhosted.org/packages/a5/b5/f485e1bbe31f768e2e5210f52ea3f432256201289fd1a3c0afda693776b0/black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4", size = 1738774 }, + { url = "https://files.pythonhosted.org/packages/a8/69/a000fc3736f89d1bdc7f4a879f8aaf516fb03613bb51a0154070383d95d9/black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af", size = 1427503 }, + { url = "https://files.pythonhosted.org/packages/a2/a8/05fb14195cfef32b7c8d4585a44b7499c2a4b205e1662c427b941ed87054/black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368", size = 1646132 }, + { url = 
"https://files.pythonhosted.org/packages/41/77/8d9ce42673e5cb9988f6df73c1c5c1d4e9e788053cccd7f5fb14ef100982/black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed", size = 1448665 }, + { url = "https://files.pythonhosted.org/packages/cc/94/eff1ddad2ce1d3cc26c162b3693043c6b6b575f538f602f26fe846dfdc75/black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018", size = 1762458 }, + { url = "https://files.pythonhosted.org/packages/28/ea/18b8d86a9ca19a6942e4e16759b2fa5fc02bbc0eb33c1b866fcd387640ab/black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2", size = 1436109 }, + { url = "https://files.pythonhosted.org/packages/27/1e/83fa8a787180e1632c3d831f7e58994d7aaf23a0961320d21e84f922f919/black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed", size = 206504 }, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/da/e94e26401b62acd6d91df2b52954aceb7f561743aa5ccc32152886c76c96/certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", size = 164886 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/06/a07f096c664aeb9f01624f858c3add0a4e913d6c96257acb4fce61e7de14/certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1", size = 163774 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = 
"https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "chardet" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/63/09/c1bc53dab74b1816a00d8d030de5bf98f724c52c1635e07681d312f20be8/charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", size = 104809 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/77/02839016f6fbbf808e8b38601df6e0e66c17bbab76dff4613f7511413597/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", size = 191647 }, + { url = "https://files.pythonhosted.org/packages/3e/33/21a875a61057165e92227466e54ee076b73af1e21fe1b31f1e292251aa1e/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", size = 121434 }, + { url = "https://files.pythonhosted.org/packages/dd/51/68b61b90b24ca35495956b718f35a9756ef7d3dd4b3c1508056fa98d1a1b/charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", size = 118979 }, + { url = "https://files.pythonhosted.org/packages/e4/a6/7ee57823d46331ddc37dd00749c95b0edec2c79b15fc0d6e6efb532e89ac/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", size = 136582 }, + { url = "https://files.pythonhosted.org/packages/74/f1/0d9fe69ac441467b737ba7f48c68241487df2f4522dd7246d9426e7c690e/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", size = 146645 }, + { url = "https://files.pythonhosted.org/packages/05/31/e1f51c76db7be1d4aef220d29fbfa5dbb4a99165d9833dcbf166753b6dc0/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", size = 139398 }, + { url = "https://files.pythonhosted.org/packages/40/26/f35951c45070edc957ba40a5b1db3cf60a9dbb1b350c2d5bef03e01e61de/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", size = 140273 }, + { url = "https://files.pythonhosted.org/packages/07/07/7e554f2bbce3295e191f7e653ff15d55309a9ca40d0362fcdab36f01063c/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", size = 142577 }, + { url = "https://files.pythonhosted.org/packages/d8/b5/eb705c313100defa57da79277d9207dc8d8e45931035862fa64b625bfead/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", size = 137747 }, + { url = "https://files.pythonhosted.org/packages/19/28/573147271fd041d351b438a5665be8223f1dd92f273713cb882ddafe214c/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", size = 143375 }, + { url = "https://files.pythonhosted.org/packages/cf/7c/f3b682fa053cc21373c9a839e6beba7705857075686a05c72e0f8c4980ca/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/1e/49/7ab74d4ac537ece3bc3334ee08645e231f39f7d6df6347b29a74b0537103/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", size = 140232 }, + { url = "https://files.pythonhosted.org/packages/2d/dc/9dacba68c9ac0ae781d40e1a0c0058e26302ea0660e574ddf6797a0347f7/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", size = 140859 }, + { url = "https://files.pythonhosted.org/packages/6c/c2/4a583f800c0708dd22096298e49f887b49d9746d0e78bfc1d7e29816614c/charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", size = 92509 }, + { url = "https://files.pythonhosted.org/packages/57/ec/80c8d48ac8b1741d5b963797b7c0c869335619e13d4744ca2f67fc11c6fc/charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", size = 99870 }, + { url = "https://files.pythonhosted.org/packages/d1/b2/fcedc8255ec42afee97f9e6f0145c734bbe104aac28300214593eb326f1d/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", size = 192892 }, + { url = "https://files.pythonhosted.org/packages/2e/7d/2259318c202f3d17f3fe6438149b3b9e706d1070fe3fcbb28049730bb25c/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", size = 122213 }, + { url = "https://files.pythonhosted.org/packages/3a/52/9f9d17c3b54dc238de384c4cb5a2ef0e27985b42a0e5cc8e8a31d918d48d/charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", size = 119404 }, + { url = "https://files.pythonhosted.org/packages/99/b0/9c365f6d79a9f0f3c379ddb40a256a67aa69c59609608fe7feb6235896e1/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", size = 137275 }, + { url = "https://files.pythonhosted.org/packages/91/33/749df346e93d7a30cdcb90cbfdd41a06026317bfbfb62cd68307c1a3c543/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", size = 147518 }, + { url = "https://files.pythonhosted.org/packages/72/1a/641d5c9f59e6af4c7b53da463d07600a695b9824e20849cb6eea8a627761/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", size = 140182 }, + { url = "https://files.pythonhosted.org/packages/ee/fb/14d30eb4956408ee3ae09ad34299131fb383c47df355ddb428a7331cfa1e/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", size = 141869 }, + { url = "https://files.pythonhosted.org/packages/df/3e/a06b18788ca2eb6695c9b22325b6fde7dde0f1d1838b1792a0076f58fe9d/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", size = 144042 }, + { url = "https://files.pythonhosted.org/packages/45/59/3d27019d3b447a88fe7e7d004a1e04be220227760264cc41b405e863891b/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", size = 138275 }, + { url = "https://files.pythonhosted.org/packages/7b/ef/5eb105530b4da8ae37d506ccfa25057961b7b63d581def6f99165ea89c7e/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", size = 144819 }, + { url = "https://files.pythonhosted.org/packages/a2/51/e5023f937d7f307c948ed3e5c29c4b7a3e42ed2ee0b8cdf8f3a706089bf0/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", size = 149415 }, + { url = "https://files.pythonhosted.org/packages/24/9d/2e3ef673dfd5be0154b20363c5cdcc5606f35666544381bee15af3778239/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", size = 141212 }, + { url = "https://files.pythonhosted.org/packages/5b/ae/ce2c12fcac59cb3860b2e2d76dc405253a4475436b1861d95fe75bdea520/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", size = 142167 }, + { url = "https://files.pythonhosted.org/packages/ed/3a/a448bf035dce5da359daf9ae8a16b8a39623cc395a2ffb1620aa1bce62b0/charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", size = 93041 }, + { url = "https://files.pythonhosted.org/packages/b6/7c/8debebb4f90174074b827c63242c23851bdf00a532489fba57fef3416e40/charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", size = 100397 }, + { url = "https://files.pythonhosted.org/packages/28/76/e6222113b83e3622caa4bb41032d0b1bf785250607392e1b778aca0b8a7d/charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", size = 48543 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "coverage" +version = "7.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796 }, + { url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244 }, + { url = "https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279 }, + { url = "https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859 }, + { url = "https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549 }, + { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477 }, + { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134 }, + { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910 }, + { url = "https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 209348 }, + { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230 }, + { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983 }, + { url = "https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221 }, + { url = "https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371 }, + { url = "https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455 }, + { url = "https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924 }, + { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252 }, + { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897 }, + { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606 }, + { url = "https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373 }, + { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007 }, + { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269 }, + { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886 }, + { url = "https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037 }, + { url = "https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 
239038 }, + { url = "https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690 }, + { url = "https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765 }, + { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611 }, + { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671 }, + { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368 }, + { url = "https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758 }, + { url = "https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035 }, + { url = "https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839 }, + { url = "https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569 }, + { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927 }, + { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401 }, + { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301 }, + { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598 }, + { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307 }, + { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453 }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11' and python_full_version >= '3.11'" }, +] + +[[package]] +name = "cryptography" +version = "43.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/05/07b55d1fa21ac18c3a8c79f764e2514e6f6a9698f1be44994f5adf0d29db/cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805", size = 686989 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f3/01fdf26701a26f4b4dbc337a26883ad5bccaa6f1bbbdd29cd89e22f18a1c/cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e", size = 6225303 }, + { url = "https://files.pythonhosted.org/packages/a3/01/4896f3d1b392025d4fcbecf40fdea92d3df8662123f6835d0af828d148fd/cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e", size = 3760905 }, + { url = "https://files.pythonhosted.org/packages/0a/be/f9a1f673f0ed4b7f6c643164e513dbad28dd4f2dcdf5715004f172ef24b6/cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f", size = 3977271 }, + { url = "https://files.pythonhosted.org/packages/4e/49/80c3a7b5514d1b416d7350830e8c422a4d667b6d9b16a9392ebfd4a5388a/cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6", size = 3746606 }, + { url = "https://files.pythonhosted.org/packages/0e/16/a28ddf78ac6e7e3f25ebcef69ab15c2c6be5ff9743dd0709a69a4f968472/cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18", size = 3986484 }, + { url = "https://files.pythonhosted.org/packages/01/f5/69ae8da70c19864a32b0315049866c4d411cce423ec169993d0434218762/cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd", size = 3852131 }, + { url = "https://files.pythonhosted.org/packages/fd/db/e74911d95c040f9afd3612b1f732e52b3e517cb80de8bf183be0b7d413c6/cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73", size = 4075647 }, + { url = "https://files.pythonhosted.org/packages/56/48/7b6b190f1462818b324e674fa20d1d5ef3e24f2328675b9b16189cbf0b3c/cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2", size = 2623873 }, + { url = 
"https://files.pythonhosted.org/packages/eb/b1/0ebff61a004f7f89e7b65ca95f2f2375679d43d0290672f7713ee3162aff/cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd", size = 3068039 }, + { url = "https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984", size = 6222984 }, + { url = "https://files.pythonhosted.org/packages/2f/78/55356eb9075d0be6e81b59f45c7b48df87f76a20e73893872170471f3ee8/cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5", size = 3762968 }, + { url = "https://files.pythonhosted.org/packages/2a/2c/488776a3dc843f95f86d2f957ca0fc3407d0242b50bede7fad1e339be03f/cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4", size = 3977754 }, + { url = "https://files.pythonhosted.org/packages/7c/04/2345ca92f7a22f601a9c62961741ef7dd0127c39f7310dffa0041c80f16f/cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7", size = 3749458 }, + { url = "https://files.pythonhosted.org/packages/ac/25/e715fa0bc24ac2114ed69da33adf451a38abb6f3f24ec207908112e9ba53/cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405", size = 3988220 }, + { url = "https://files.pythonhosted.org/packages/21/ce/b9c9ff56c7164d8e2edfb6c9305045fbc0df4508ccfdb13ee66eb8c95b0e/cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16", size = 3853898 }, + { url = "https://files.pythonhosted.org/packages/2a/33/b3682992ab2e9476b9c81fff22f02c8b0a1e6e1d49ee1750a67d85fd7ed2/cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73", size = 4076592 }, + { url = "https://files.pythonhosted.org/packages/81/1e/ffcc41b3cebd64ca90b28fd58141c5f68c83d48563c88333ab660e002cd3/cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995", size = 2623145 }, + { url = "https://files.pythonhosted.org/packages/87/5c/3dab83cc4aba1f4b0e733e3f0c3e7d4386440d660ba5b1e3ff995feb734d/cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362", size = 3068026 }, +] + +[[package]] +name = "cumulusci" +version = "3.93.0" +source = { editable = "." 
} +dependencies = [ + { name = "click" }, + { name = "cryptography" }, + { name = "defusedxml" }, + { name = "faker" }, + { name = "fs" }, + { name = "github3-py" }, + { name = "jinja2" }, + { name = "keyring" }, + { name = "lxml" }, + { name = "markupsafe" }, + { name = "psutil" }, + { name = "pydantic" }, + { name = "pyjwt" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "requests-futures" }, + { name = "rich" }, + { name = "robotframework" }, + { name = "robotframework-pabot" }, + { name = "robotframework-requests" }, + { name = "robotframework-seleniumlibrary" }, + { name = "rst2ansi" }, + { name = "salesforce-bulk" }, + { name = "sarge" }, + { name = "selenium" }, + { name = "simple-salesforce" }, + { name = "snowfakery" }, + { name = "sqlalchemy" }, + { name = "xmltodict" }, +] + +[package.dev-dependencies] +dev = [ + { name = "coverage", extra = ["toml"] }, + { name = "factory-boy" }, + { name = "furo" }, + { name = "jsonschema" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "pytest-random-order" }, + { name = "pytest-vcr" }, + { name = "responses" }, + { name = "testfixtures" }, + { name = "tox" }, + { name = "typeguard" }, + { name = "vcrpy" }, +] +docs = [ + { name = "myst-parser" }, + { name = "sphinx" }, +] +lint = [ + { name = "black" }, + { name = "flake8" }, + { name = "isort" }, + { name = "pre-commit" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.1" }, + { name = "cryptography" }, + { name = "defusedxml" }, + { name = "faker" }, + { name = "fs" }, + { name = "github3-py" }, + { name = "jinja2" }, + { name = "keyring", specifier = "<=23.0.1" }, + { name = "lxml" }, + { name = "markupsafe" }, + { name = "psutil" }, + { name = "pydantic", specifier = "<2" }, + { name = "pyjwt" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "requests-futures" }, + { name = "rich", specifier = ">=13.9.4" }, + { name = "robotframework" }, + { name = "robotframework-pabot" }, + { name = "robotframework-requests" }, + { name = "robotframework-seleniumlibrary", specifier = "<6" }, + { name = "rst2ansi", specifier = ">=0.1.5" }, + { name = "salesforce-bulk" }, + { name = "sarge" }, + { name = "selenium", specifier = "<4" }, + { name = "simple-salesforce", specifier = "==1.11.4" }, + { name = "snowfakery", directory = "../Snowfakery" }, + { name = "sqlalchemy", specifier = "<2" }, + { name = "xmltodict" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "coverage", extras = ["toml"], specifier = ">=7.6.1" }, + { name = "factory-boy", specifier = ">=3.3.1" }, + { name = "furo", specifier = ">=2023.3.27" }, + { name = "jsonschema", specifier = ">=4.23.0" }, + { name = "pytest", specifier = ">=7.0.1" }, + { name = "pytest-cov", specifier = ">=5.0.0" }, + { name = "pytest-random-order", specifier = ">=1.1.1" }, + { name = "pytest-vcr", specifier = ">=1.0.2" }, + { name = "responses", specifier = ">=0.23.1" }, + { name = "testfixtures", specifier = ">=8.3.0" }, + { name = "tox", specifier = ">=4.20.0" }, + { name = "typeguard", specifier = "<=2.13.3" }, + { name = "vcrpy", specifier = ">=6.0.2" }, +] +docs = [ + { name = "myst-parser", specifier = ">=1.0.0" }, + { name = "sphinx", specifier = ">=5.3.0" }, +] +lint = [ + { name = "black", specifier = ">=24.8.0" }, + { name = "flake8", specifier = "<4" }, + { name = "isort", specifier = ">=5.13.2" }, + { name = "pre-commit", specifier = ">=3.5.0" }, +] + 
+[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 }, +] + +[[package]] +name = "distlib" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, +] + +[[package]] +name = "docutils" +version = "0.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc", size = 1962041 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/44/8a15e45ffa96e6cf82956dd8d7af9e666357e16b0d93b253903475ee947f/docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", size = 548181 }, +] + +[[package]] +name = "factory-boy" +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "faker" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/3d/8070dde623341401b1c80156583d4c793058fe250450178218bb6e45526c/factory_boy-3.3.1.tar.gz", hash = "sha256:8317aa5289cdfc45f9cae570feb07a6177316c82e34d14df3c2e1f22f26abef0", size = 163924 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/44ec67152f3129d0114c1499dd34f0a0a0faf43d9c2af05bc535746ca482/factory_boy-3.3.1-py2.py3-none-any.whl", hash = "sha256:7b1113c49736e1e9995bc2a18f4dbf2c52cf0f841103517010b1d825712ce3ca", size = 36878 }, +] + +[[package]] +name = "faker" +version = "24.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/18/87b247323b6fbc006d5579aa4e783d43f29b3becf71fcb212fda3e647621/Faker-24.4.0.tar.gz", hash = "sha256:a5ddccbe97ab691fad6bd8036c31f5697cfaa550e62e000078d1935fa8a7ec2e", size = 1724351 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/ac/b070934c1f1b7a6deefe5f4cda2a6ba988277765065d658f2d7e2bbb79dd/Faker-24.4.0-py3-none-any.whl", hash = "sha256:998c29ee7d64429bd59204abffa9ba11f784fb26c7b9df4def78d1a70feb36a7", size = 1762024 }, +] + +[[package]] +name = "faker-edu" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "faker" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/a1/e202e07a03e0c6a0e96ba054cb7bccc29f053c0e4f61e55a42905c6cab40/faker-edu-1.1.0.tar.gz", hash = 
"sha256:4f2117a969b42a0adf99ececdfebf2fb65066ea6fe49a1eb01a1168c32ff5485", size = 6133 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/c2/4c621cc8132e67db86fe145ff13685ecc4620b601430b13bce145de4595e/faker_edu-1.1.0-py3-none-any.whl", hash = "sha256:1f0b025d5b66273ae663d88837d7c2616ce1f48289c74ecc2aee749a6693754e", size = 6723 }, +] + +[[package]] +name = "faker-nonprofit" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "faker" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6b/9e/16412dd623985d8d3cb8db22e05d6764d61795ee86af5dff0654f4f69499/faker-nonprofit-1.0.0.tar.gz", hash = "sha256:bcadd173a185ae8fb9dd184010cd55c9ebac034ea893f40d51beb5be93216983", size = 3392 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/3e/e338da76206031aa2d9c187f7929554991edf0153bc12a2e5f6457a93462/faker_nonprofit-1.0.0-py3-none-any.whl", hash = "sha256:ba98ae0a05bc139941db34853e4f6880a480b679e2e0d59f7b9bd18a540a6232", size = 4009 }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, +] + +[[package]] +name = "flake8" +version = "3.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/47/15b267dfe7e03dca4c4c06e7eadbd55ef4dfd368b13a0bab36d708b14366/flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b", size = 164777 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/80/35a0716e5d5101e643404dabd20f07f5528a21f3ef4032d31a49c913237b/flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907", size = 73147 }, +] + +[[package]] +name = "fs" +version = "2.4.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appdirs" }, + { name = "setuptools" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5d/a9/af5bfd5a92592c16cdae5c04f68187a309be8a146b528eac3c6e30edbad2/fs-2.4.16.tar.gz", hash = "sha256:ae97c7d51213f4b70b6a958292530289090de3a7e15841e108fbe144f069d313", size = 187441 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/5c/a3d95dc1ec6cdeb032d789b552ecc76effa3557ea9186e1566df6aac18df/fs-2.4.16-py2.py3-none-any.whl", hash = "sha256:660064febbccda264ae0b6bace80a8d1be9e089e0a5eb2427b7d517f9a91545c", size = 135261 }, +] + +[[package]] +name = "furo" +version = "2023.3.27" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx" }, + { name = "sphinx-basic-ng" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/21/233938933f1629a4933c8fce2f803cc8fd211ca563ea4337cb44920bbbfa/furo-2023.3.27.tar.gz", hash = "sha256:b99e7867a5cc833b2b34d7230631dd6558c7a29f93071fdbb5709634bb33c5a5", size = 1636618 } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/ec/8c/fa66eb31b1b89b9208269f1fea5edcbecd52b274e5c7afadb9152fb3d4ca/furo-2023.3.27-py3-none-any.whl", hash = "sha256:4ab2be254a2d5e52792d0ca793a12c35582dd09897228a6dd47885dabd5c9521", size = 327605 }, +] + +[[package]] +name = "github3-py" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-dateutil" }, + { name = "requests" }, + { name = "uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/91/603bcaf8cd1b3927de64bf56c3a8915f6653ea7281919140c5bcff2bfe7b/github3.py-4.0.1.tar.gz", hash = "sha256:30d571076753efc389edc7f9aaef338a4fcb24b54d8968d5f39b1342f45ddd36", size = 36214038 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/2394d4fb542574678b0ba342daf734d4d811768da3c2ee0c84d509dcb26c/github3.py-4.0.1-py3-none-any.whl", hash = "sha256:a89af7de25650612d1da2f0609622bcdeb07ee8a45a1c06b2d16a05e4234e753", size = 151800 }, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/17/14/3bddb1298b9a6786539ac609ba4b7c9c0842e12aa73aaa4d8d73ec8f8185/greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491", size = 182013 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/20/68a278a6f93fa36e21cfc3d7599399a8a831225644eb3b6b18755cd3d6fc/greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61", size = 271666 }, + { url = "https://files.pythonhosted.org/packages/21/b4/90e06e07c78513ab03855768200bdb35c8e764e805b3f14fb488e56f82dc/greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559", size = 657689 }, + { url = "https://files.pythonhosted.org/packages/f6/a2/0ed21078039072f9dc738bbf3af12b103a84106b1385ac4723841f846ce7/greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e", size = 673009 }, + { url = "https://files.pythonhosted.org/packages/42/11/42ad6b1104c357826bbee7d7b9e4f24dbd9fde94899a03efb004aab62963/greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33", size = 667432 }, + { url = "https://files.pythonhosted.org/packages/bb/6b/384dee7e0121cbd1757bdc1824a5ee28e43d8d4e3f99aa59521f629442fe/greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379", size = 667442 }, + { url = "https://files.pythonhosted.org/packages/c6/1f/12d5a6cc26e8b483c2e7975f9c22e088ac735c0d8dcb8a8f72d31a4e5f04/greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22", size = 620032 }, + { url = "https://files.pythonhosted.org/packages/c7/ec/85b647e59e0f137c7792a809156f413e38379cf7f3f2e1353c37f4be4026/greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3", size = 1154218 }, + { url = 
"https://files.pythonhosted.org/packages/94/ed/1e5f4bca691a81700e5a88e86d6f0e538acb10188cd2cc17140e523255ef/greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d", size = 1180754 }, + { url = "https://files.pythonhosted.org/packages/47/79/26d54d7d700ef65b689fc2665a40846d13e834da0486674a8d4f0f371a47/greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728", size = 292822 }, + { url = "https://files.pythonhosted.org/packages/a2/2f/461615adc53ba81e99471303b15ac6b2a6daa8d2a0f7f77fd15605e16d5b/greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be", size = 273085 }, + { url = "https://files.pythonhosted.org/packages/e9/55/2c3cfa3cdbb940cf7321fbcf544f0e9c74898eed43bf678abf416812d132/greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e", size = 660514 }, + { url = "https://files.pythonhosted.org/packages/38/77/efb21ab402651896c74f24a172eb4d7479f9f53898bd5e56b9e20bb24ffd/greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676", size = 674295 }, + { url = "https://files.pythonhosted.org/packages/74/3a/92f188ace0190f0066dca3636cf1b09481d0854c46e92ec5e29c7cefe5b1/greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc", size = 669395 }, + { url = "https://files.pythonhosted.org/packages/63/0f/847ed02cdfce10f0e6e3425cd054296bddb11a17ef1b34681fa01a055187/greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230", size = 670455 }, + { url = "https://files.pythonhosted.org/packages/bd/37/56b0da468a85e7704f3b2bc045015301bdf4be2184a44868c71f6dca6fe2/greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf", size = 625692 }, + { url = "https://files.pythonhosted.org/packages/7c/68/b5f4084c0a252d7e9c0d95fc1cfc845d08622037adb74e05be3a49831186/greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305", size = 1152597 }, + { url = "https://files.pythonhosted.org/packages/a4/fa/31e22345518adcd69d1d6ab5087a12c178aa7f3c51103f6d5d702199d243/greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6", size = 1181043 }, + { url = "https://files.pythonhosted.org/packages/53/80/3d94d5999b4179d91bcc93745d1b0815b073d61be79dd546b840d17adb18/greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2", size = 293635 }, +] + +[[package]] +name = "gvgen" +version = "1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/59/bf1fee74afaa055ae999d899369eb3278b55e3503be281a2f2cdf8ae6824/GvGen-1.0.tar.gz", hash = "sha256:e8d2ae8e042a6a96150e814f57402d142aa768943d827443409acf925ee756d2", size = 8636 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5a/74/5f2a4ddcc45dc0682f9c331ceb4f5f53fcaaa0aa6898edd9938827d15783/GvGen-1.0-py3-none-any.whl", hash = "sha256:6b84f00c9cd55298248d8b24a7f4f97af14f3896984dee07df391e47aac20079", size = 7997 }, +] + +[[package]] +name = "identify" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972 }, +] + +[[package]] +name = "idna" +version = "3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567 }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "isort" +version = "5.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/87/f9/c1eb8635a24e87ade2efce21e3ce8cd6b8630bb685ddc9cdaca1349b2eb5/isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", size = 175303 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310 }, +] + +[[package]] +name = "jeepney" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/f4/154cf374c2daf2020e05c3c6a03c91348d59b23c5366e968feb198306fdf/jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806", size = 106005 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/72/2a1e2290f1ab1e06f71f3d0f1646c9e4634e70e1d37491535e19266e8dc9/jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755", size = 48435 }, +] + +[[package]] +name = "jinja2" +version = "3.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/5e/3a21abf3cd467d7876045335e681d276ac32492febe6d98ad89562d1a7e1/Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90", size = 268261 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/6d/6de6be2d02603ab56e72997708809e8a5b0fbfee080735109b40a3564843/Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa", size = 133236 }, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b9/cc0cc592e7c195fb8a650c1d5990b10175cf13b4c97465c72ec841de9e4b/jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc", size = 13983 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/07/44bd408781594c4d0a027666ef27fab1e441b109dc3b76b4f836f8fd04fe/jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c", size = 18482 }, +] + +[[package]] +name = "keyring" +version = "23.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = 
"secretstorage", marker = "sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b0/b5/b27458e1d2adf2a11c6e95c67ac63f828e96fe7e166132e5dacbe03e88c0/keyring-23.0.1.tar.gz", hash = "sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8", size = 59185 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/f9/41230ac47f738f1ba66676dc8d3b30ca5b1f9eb0230fc204bcd9836c4ae9/keyring-23.0.1-py3-none-any.whl", hash = "sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48", size = 33013 }, +] + +[[package]] +name = "lxml" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/6b/20c3a4b24751377aaa6307eb230b66701024012c29dd374999cc92983269/lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f", size = 3679318 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/a8/449faa2a3cbe6a99f8d38dcd51a3ee8844c17862841a6f769ea7c2a9cd0f/lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b", size = 8141056 }, + { url = "https://files.pythonhosted.org/packages/ac/8a/ae6325e994e2052de92f894363b038351c50ee38749d30cc6b6d96aaf90f/lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18", size = 4425238 }, + { url = "https://files.pythonhosted.org/packages/f8/fb/128dddb7f9086236bce0eeae2bfb316d138b49b159f50bc681d56c1bdd19/lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442", size = 5095197 }, + { url = "https://files.pythonhosted.org/packages/b4/f9/a181a8ef106e41e3086629c8bdb2d21a942f14c84a0e77452c22d6b22091/lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4", size = 4809809 }, + { url = "https://files.pythonhosted.org/packages/25/2f/b20565e808f7f6868aacea48ddcdd7e9e9fb4c799287f21f1a6c7c2e8b71/lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f", size = 5407593 }, + { url = "https://files.pythonhosted.org/packages/23/0e/caac672ec246d3189a16c4d364ed4f7d6bf856c080215382c06764058c08/lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e", size = 4866657 }, + { url = "https://files.pythonhosted.org/packages/67/a4/1f5fbd3f58d4069000522196b0b776a014f3feec1796da03e495cf23532d/lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c", size = 4967017 }, + { url = "https://files.pythonhosted.org/packages/ee/73/623ecea6ca3c530dd0a4ed0d00d9702e0e85cd5624e2d5b93b005fe00abd/lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16", size = 4810730 }, + { url = "https://files.pythonhosted.org/packages/1d/ce/fb84fb8e3c298f3a245ae3ea6221c2426f1bbaa82d10a88787412a498145/lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79", size = 5455154 }, + { url = 
"https://files.pythonhosted.org/packages/b1/72/4d1ad363748a72c7c0411c28be2b0dc7150d91e823eadad3b91a4514cbea/lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080", size = 4969416 }, + { url = "https://files.pythonhosted.org/packages/42/07/b29571a58a3a80681722ea8ed0ba569211d9bb8531ad49b5cacf6d409185/lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654", size = 5013672 }, + { url = "https://files.pythonhosted.org/packages/b9/93/bde740d5a58cf04cbd38e3dd93ad1e36c2f95553bbf7d57807bc6815d926/lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d", size = 4878644 }, + { url = "https://files.pythonhosted.org/packages/56/b5/645c8c02721d49927c93181de4017164ec0e141413577687c3df8ff0800f/lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763", size = 5511531 }, + { url = "https://files.pythonhosted.org/packages/85/3f/6a99a12d9438316f4fc86ef88c5d4c8fb674247b17f3173ecadd8346b671/lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec", size = 5402065 }, + { url = "https://files.pythonhosted.org/packages/80/8a/df47bff6ad5ac57335bf552babfb2408f9eb680c074ec1ba412a1a6af2c5/lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be", size = 5069775 }, + { url = "https://files.pythonhosted.org/packages/08/ae/e7ad0f0fbe4b6368c5ee1e3ef0c3365098d806d42379c46c1ba2802a52f7/lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9", size = 3474226 }, + { url = "https://files.pythonhosted.org/packages/c3/b5/91c2249bfac02ee514ab135e9304b89d55967be7e53e94a879b74eec7a5c/lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1", size = 3814971 }, + { url = "https://files.pythonhosted.org/packages/eb/6d/d1f1c5e40c64bf62afd7a3f9b34ce18a586a1cccbf71e783cd0a6d8e8971/lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859", size = 8171753 }, + { url = "https://files.pythonhosted.org/packages/bd/83/26b1864921869784355459f374896dcf8b44d4af3b15d7697e9156cb2de9/lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e", size = 4441955 }, + { url = "https://files.pythonhosted.org/packages/e0/d2/e9bff9fb359226c25cda3538f664f54f2804f4b37b0d7c944639e1a51f69/lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f", size = 5050778 }, + { url = "https://files.pythonhosted.org/packages/88/69/6972bfafa8cd3ddc8562b126dd607011e218e17be313a8b1b9cc5a0ee876/lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e", size = 4748628 }, + { url = "https://files.pythonhosted.org/packages/5d/ea/a6523c7c7f6dc755a6eed3d2f6d6646617cad4d3d6d8ce4ed71bfd2362c8/lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179", size = 5322215 }, 
+ { url = "https://files.pythonhosted.org/packages/99/37/396fbd24a70f62b31d988e4500f2068c7f3fd399d2fd45257d13eab51a6f/lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a", size = 4813963 }, + { url = "https://files.pythonhosted.org/packages/09/91/e6136f17459a11ce1757df864b213efbeab7adcb2efa63efb1b846ab6723/lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3", size = 4923353 }, + { url = "https://files.pythonhosted.org/packages/1d/7c/2eeecf87c9a1fca4f84f991067c693e67340f2b7127fc3eca8fa29d75ee3/lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1", size = 4740541 }, + { url = "https://files.pythonhosted.org/packages/3b/ed/4c38ba58defca84f5f0d0ac2480fdcd99fc7ae4b28fc417c93640a6949ae/lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d", size = 5346504 }, + { url = "https://files.pythonhosted.org/packages/a5/22/bbd3995437e5745cb4c2b5d89088d70ab19d4feabf8a27a24cecb9745464/lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c", size = 4898077 }, + { url = "https://files.pythonhosted.org/packages/0a/6e/94537acfb5b8f18235d13186d247bca478fea5e87d224644e0fe907df976/lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99", size = 4946543 }, + { url = "https://files.pythonhosted.org/packages/8d/e8/4b15df533fe8e8d53363b23a41df9be907330e1fa28c7ca36893fad338ee/lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff", size = 4816841 }, + { url = "https://files.pythonhosted.org/packages/1a/e7/03f390ea37d1acda50bc538feb5b2bda6745b25731e4e76ab48fae7106bf/lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a", size = 5417341 }, + { url = "https://files.pythonhosted.org/packages/ea/99/d1133ab4c250da85a883c3b60249d3d3e7c64f24faff494cf0fd23f91e80/lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8", size = 5327539 }, + { url = "https://files.pythonhosted.org/packages/7d/ed/e6276c8d9668028213df01f598f385b05b55a4e1b4662ee12ef05dab35aa/lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d", size = 5012542 }, + { url = "https://files.pythonhosted.org/packages/36/88/684d4e800f5aa28df2a991a6a622783fb73cf0e46235cfa690f9776f032e/lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30", size = 3486454 }, + { url = "https://files.pythonhosted.org/packages/fc/82/ace5a5676051e60355bd8fb945df7b1ba4f4fb8447f2010fb816bfd57724/lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f", size = 3816857 }, + { url = "https://files.pythonhosted.org/packages/94/6a/42141e4d373903bfea6f8e94b2f554d05506dfda522ada5343c651410dc8/lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a", size = 8156284 }, + { url = 
"https://files.pythonhosted.org/packages/91/5e/fa097f0f7d8b3d113fb7312c6308af702f2667f22644441715be961f2c7e/lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd", size = 4432407 }, + { url = "https://files.pythonhosted.org/packages/2d/a1/b901988aa6d4ff937f2e5cfc114e4ec561901ff00660c3e56713642728da/lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51", size = 5048331 }, + { url = "https://files.pythonhosted.org/packages/30/0f/b2a54f48e52de578b71bbe2a2f8160672a8a5e103df3a78da53907e8c7ed/lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b", size = 4744835 }, + { url = "https://files.pythonhosted.org/packages/82/9d/b000c15538b60934589e83826ecbc437a1586488d7c13f8ee5ff1f79a9b8/lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002", size = 5316649 }, + { url = "https://files.pythonhosted.org/packages/e3/ee/ffbb9eaff5e541922611d2c56b175c45893d1c0b8b11e5a497708a6a3b3b/lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4", size = 4812046 }, + { url = "https://files.pythonhosted.org/packages/15/ff/7ff89d567485c7b943cdac316087f16b2399a8b997007ed352a1248397e5/lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492", size = 4918597 }, + { url = "https://files.pythonhosted.org/packages/c6/a3/535b6ed8c048412ff51268bdf4bf1cf052a37aa7e31d2e6518038a883b29/lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3", size = 4738071 }, + { url = "https://files.pythonhosted.org/packages/7a/8f/cbbfa59cb4d4fd677fe183725a76d8c956495d7a3c7f111ab8f5e13d2e83/lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4", size = 5342213 }, + { url = "https://files.pythonhosted.org/packages/5c/fb/db4c10dd9958d4b52e34d1d1f7c1f434422aeaf6ae2bbaaff2264351d944/lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367", size = 4893749 }, + { url = "https://files.pythonhosted.org/packages/f2/38/bb4581c143957c47740de18a3281a0cab7722390a77cc6e610e8ebf2d736/lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832", size = 4945901 }, + { url = "https://files.pythonhosted.org/packages/fc/d5/18b7de4960c731e98037bd48fa9f8e6e8f2558e6fbca4303d9b14d21ef3b/lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff", size = 4815447 }, + { url = "https://files.pythonhosted.org/packages/97/a8/cd51ceaad6eb849246559a8ef60ae55065a3df550fc5fcd27014361c1bab/lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd", size = 5411186 }, + { url = "https://files.pythonhosted.org/packages/89/c3/1e3dabab519481ed7b1fdcba21dcfb8832f57000733ef0e71cf6d09a5e03/lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb", size = 5324481 }, + { url = "https://files.pythonhosted.org/packages/b6/17/71e9984cf0570cd202ac0a1c9ed5c1b8889b0fc8dc736f5ef0ffb181c284/lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b", size = 5011053 }, + { url = "https://files.pythonhosted.org/packages/69/68/9f7e6d3312a91e30829368c2b3217e750adef12a6f8eb10498249f4e8d72/lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957", size = 3485634 }, + { url = "https://files.pythonhosted.org/packages/7d/db/214290d58ad68c587bd5d6af3d34e56830438733d0d0856c0275fde43652/lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d", size = 3814417 }, +] + +[[package]] +name = "markdown-it-py" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/c0/59bd6d0571986f72899288a95d9d6178d0eebd70b6650f1bb3f0da90f8f7/markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1", size = 67120 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/25/2d88e8feee8e055d015343f9b86e370a1ccbec546f2865c98397aaef24af/markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", size = 84466 }, +] + +[[package]] +name = "markupsafe" +version = "2.1.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/5b/aae44c6655f3801e81aa3eef09dbbf012431987ba564d7231722f68df02d/MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", size = 19384 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/e7/291e55127bb2ae67c64d66cef01432b5933859dfb7d6949daa721b89d0b3/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", size = 18219 }, + { url = "https://files.pythonhosted.org/packages/6b/cb/aed7a284c00dfa7c0682d14df85ad4955a350a21d2e3b06d8240497359bf/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", size = 14098 }, + { url = "https://files.pythonhosted.org/packages/1c/cf/35fe557e53709e93feb65575c93927942087e9b97213eabc3fe9d5b25a55/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", size = 29014 }, + { url = "https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", size = 28220 }, + { url = "https://files.pythonhosted.org/packages/0c/40/2e73e7d532d030b1e41180807a80d564eda53babaf04d65e15c1cf897e40/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", size = 27756 }, + { url = "https://files.pythonhosted.org/packages/18/46/5dca760547e8c59c5311b332f70605d24c99d1303dd9a6e1fc3ed0d73561/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", size = 33988 }, + { url = "https://files.pythonhosted.org/packages/6d/c5/27febe918ac36397919cd4a67d5579cbbfa8da027fa1238af6285bb368ea/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", size = 32718 }, + { url = "https://files.pythonhosted.org/packages/f8/81/56e567126a2c2bc2684d6391332e357589a96a76cb9f8e5052d85cb0ead8/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", size = 33317 }, + { url = "https://files.pythonhosted.org/packages/00/0b/23f4b2470accb53285c613a3ab9ec19dc944eaf53592cb6d9e2af8aa24cc/MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", size = 16670 }, + { url = "https://files.pythonhosted.org/packages/b7/a2/c78a06a9ec6d04b3445a949615c4c7ed86a0b2eb68e44e7541b9d57067cc/MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", size = 17224 }, + { url = "https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", size = 18215 }, + { url = "https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", size = 14069 }, + { url = "https://files.pythonhosted.org/packages/51/b5/5d8ec796e2a08fc814a2c7d2584b55f889a55cf17dd1a90f2beb70744e5c/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", size = 29452 }, + { url = "https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", size = 28462 }, + { url = "https://files.pythonhosted.org/packages/2d/75/fd6cb2e68780f72d47e6671840ca517bda5ef663d30ada7616b0462ad1e3/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", size = 27869 }, + { url = "https://files.pythonhosted.org/packages/b0/81/147c477391c2750e8fc7705829f7351cf1cd3be64406edcf900dc633feb2/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", size = 33906 }, + { url = "https://files.pythonhosted.org/packages/8b/ff/9a52b71839d7a256b563e85d11050e307121000dcebc97df120176b3ad93/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", size = 32296 }, + { url = "https://files.pythonhosted.org/packages/88/07/2dc76aa51b481eb96a4c3198894f38b480490e834479611a4053fbf08623/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", size = 33038 }, + { url = 
"https://files.pythonhosted.org/packages/96/0c/620c1fb3661858c0e37eb3cbffd8c6f732a67cd97296f725789679801b31/MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", size = 16572 }, + { url = "https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", size = 17127 }, +] + +[[package]] +name = "mccabe" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/18/fa675aa501e11d6d6ca0ae73a101b2f3571a565e0f7d38e062eec18a91ee/mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f", size = 8612 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/89/479dc97e18549e21354893e4ee4ef36db1d237534982482c3681ee6e7b57/mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", size = 8556 }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.3.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/e7/cc2720da8a32724b36d04c6dba5644154cdf883a1482b3bbb81959a642ed/mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a", size = 39871 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/4c/a9b222f045f98775034d243198212cbea36d3524c3ee1e8ab8c0346d6953/mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e", size = 52087 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "multidict" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/13/df3505a46d0cd08428e4c8169a196131d1b0c4b515c3649829258843dde6/multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6", size = 48570 }, + { url = "https://files.pythonhosted.org/packages/f0/e1/a215908bfae1343cdb72f805366592bdd60487b4232d039c437fe8f5013d/multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156", size = 29316 }, + { url = "https://files.pythonhosted.org/packages/70/0f/6dc70ddf5d442702ed74f298d69977f904960b82368532c88e854b79f72b/multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb", size = 29640 }, + { url = "https://files.pythonhosted.org/packages/d8/6d/9c87b73a13d1cdea30b321ef4b3824449866bd7f7127eceed066ccb9b9ff/multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b", size = 131067 }, + { url = "https://files.pythonhosted.org/packages/cc/1e/1b34154fef373371fd6c65125b3d42ff5f56c7ccc6bfff91b9b3c60ae9e0/multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72", size = 138507 }, + { url = "https://files.pythonhosted.org/packages/fb/e0/0bc6b2bac6e461822b5f575eae85da6aae76d0e2a79b6665d6206b8e2e48/multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304", size = 133905 }, + { url = "https://files.pythonhosted.org/packages/ba/af/73d13b918071ff9b2205fcf773d316e0f8fefb4ec65354bbcf0b10908cc6/multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351", size = 129004 }, + { url = "https://files.pythonhosted.org/packages/74/21/23960627b00ed39643302d81bcda44c9444ebcdc04ee5bedd0757513f259/multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb", size = 121308 }, + { url = "https://files.pythonhosted.org/packages/8b/5c/cf282263ffce4a596ed0bb2aa1a1dddfe1996d6a62d08842a8d4b33dca13/multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3", size = 132608 }, + { url = "https://files.pythonhosted.org/packages/d7/3e/97e778c041c72063f42b290888daff008d3ab1427f5b09b714f5a8eff294/multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399", size = 127029 }, + { url = "https://files.pythonhosted.org/packages/47/ac/3efb7bfe2f3aefcf8d103e9a7162572f01936155ab2f7ebcc7c255a23212/multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423", size = 137594 }, + { url = "https://files.pythonhosted.org/packages/42/9b/6c6e9e8dc4f915fc90a9b7798c44a30773dea2995fdcb619870e705afe2b/multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3", size = 134556 }, + { url = "https://files.pythonhosted.org/packages/1d/10/8e881743b26aaf718379a14ac58572a240e8293a1c9d68e1418fb11c0f90/multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753", size = 130993 }, + { url = "https://files.pythonhosted.org/packages/45/84/3eb91b4b557442802d058a7579e864b329968c8d0ea57d907e7023c677f2/multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80", size = 26405 }, + { url = "https://files.pythonhosted.org/packages/9f/0b/ad879847ecbf6d27e90a6eabb7eff6b62c129eefe617ea45eae7c1f0aead/multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926", size = 28795 }, + { url = 
"https://files.pythonhosted.org/packages/fd/16/92057c74ba3b96d5e211b553895cd6dc7cc4d1e43d9ab8fafc727681ef71/multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa", size = 48713 }, + { url = "https://files.pythonhosted.org/packages/94/3d/37d1b8893ae79716179540b89fc6a0ee56b4a65fcc0d63535c6f5d96f217/multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436", size = 29516 }, + { url = "https://files.pythonhosted.org/packages/a2/12/adb6b3200c363062f805275b4c1e656be2b3681aada66c80129932ff0bae/multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761", size = 29557 }, + { url = "https://files.pythonhosted.org/packages/47/e9/604bb05e6e5bce1e6a5cf80a474e0f072e80d8ac105f1b994a53e0b28c42/multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e", size = 130170 }, + { url = "https://files.pythonhosted.org/packages/7e/13/9efa50801785eccbf7086b3c83b71a4fb501a4d43549c2f2f80b8787d69f/multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef", size = 134836 }, + { url = "https://files.pythonhosted.org/packages/bf/0f/93808b765192780d117814a6dfcc2e75de6dcc610009ad408b8814dca3ba/multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95", size = 133475 }, + { url = "https://files.pythonhosted.org/packages/d3/c8/529101d7176fe7dfe1d99604e48d69c5dfdcadb4f06561f465c8ef12b4df/multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925", size = 131049 }, + { url = "https://files.pythonhosted.org/packages/ca/0c/fc85b439014d5a58063e19c3a158a889deec399d47b5269a0f3b6a2e28bc/multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966", size = 120370 }, + { url = "https://files.pythonhosted.org/packages/db/46/d4416eb20176492d2258fbd47b4abe729ff3b6e9c829ea4236f93c865089/multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305", size = 125178 }, + { url = "https://files.pythonhosted.org/packages/5b/46/73697ad7ec521df7de5531a32780bbfd908ded0643cbe457f981a701457c/multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2", size = 119567 }, + { url = "https://files.pythonhosted.org/packages/cd/ed/51f060e2cb0e7635329fa6ff930aa5cffa17f4c7f5c6c3ddc3500708e2f2/multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2", size = 129822 }, + { url = "https://files.pythonhosted.org/packages/df/9e/ee7d1954b1331da3eddea0c4e08d9142da5f14b1321c7301f5014f49d492/multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6", size = 128656 }, + { url = 
"https://files.pythonhosted.org/packages/77/00/8538f11e3356b5d95fa4b024aa566cde7a38aa7a5f08f4912b32a037c5dc/multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3", size = 125360 }, + { url = "https://files.pythonhosted.org/packages/be/05/5d334c1f2462d43fec2363cd00b1c44c93a78c3925d952e9a71caf662e96/multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133", size = 26382 }, + { url = "https://files.pythonhosted.org/packages/a3/bf/f332a13486b1ed0496d624bcc7e8357bb8053823e8cd4b9a18edc1d97e73/multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1", size = 28529 }, + { url = "https://files.pythonhosted.org/packages/22/67/1c7c0f39fe069aa4e5d794f323be24bf4d33d62d2a348acdb7991f8f30db/multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008", size = 48771 }, + { url = "https://files.pythonhosted.org/packages/3c/25/c186ee7b212bdf0df2519eacfb1981a017bda34392c67542c274651daf23/multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f", size = 29533 }, + { url = "https://files.pythonhosted.org/packages/67/5e/04575fd837e0958e324ca035b339cea174554f6f641d3fb2b4f2e7ff44a2/multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28", size = 29595 }, + { url = "https://files.pythonhosted.org/packages/d3/b2/e56388f86663810c07cfe4a3c3d87227f3811eeb2d08450b9e5d19d78876/multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b", size = 130094 }, + { url = "https://files.pythonhosted.org/packages/6c/ee/30ae9b4186a644d284543d55d491fbd4239b015d36b23fea43b4c94f7052/multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c", size = 134876 }, + { url = "https://files.pythonhosted.org/packages/84/c7/70461c13ba8ce3c779503c70ec9d0345ae84de04521c1f45a04d5f48943d/multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3", size = 133500 }, + { url = "https://files.pythonhosted.org/packages/4a/9f/002af221253f10f99959561123fae676148dd730e2daa2cd053846a58507/multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44", size = 131099 }, + { url = "https://files.pythonhosted.org/packages/82/42/d1c7a7301d52af79d88548a97e297f9d99c961ad76bbe6f67442bb77f097/multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2", size = 120403 }, + { url = "https://files.pythonhosted.org/packages/68/f3/471985c2c7ac707547553e8f37cff5158030d36bdec4414cb825fbaa5327/multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3", size = 125348 }, + { url = "https://files.pythonhosted.org/packages/67/2c/e6df05c77e0e433c214ec1d21ddd203d9a4770a1f2866a8ca40a545869a0/multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa", size = 119673 }, + { url = "https://files.pythonhosted.org/packages/c5/cd/bc8608fff06239c9fb333f9db7743a1b2eafe98c2666c9a196e867a3a0a4/multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa", size = 129927 }, + { url = "https://files.pythonhosted.org/packages/44/8e/281b69b7bc84fc963a44dc6e0bbcc7150e517b91df368a27834299a526ac/multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4", size = 128711 }, + { url = "https://files.pythonhosted.org/packages/12/a4/63e7cd38ed29dd9f1881d5119f272c898ca92536cdb53ffe0843197f6c85/multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6", size = 125519 }, + { url = "https://files.pythonhosted.org/packages/38/e0/4f5855037a72cd8a7a2f60a3952d9aa45feedb37ae7831642102604e8a37/multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81", size = 26426 }, + { url = "https://files.pythonhosted.org/packages/7e/a5/17ee3a4db1e310b7405f5d25834460073a8ccd86198ce044dfaf69eac073/multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774", size = 28531 }, + { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "myst-parser" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "jinja2" }, + { name = "markdown-it-py" }, + { name = "mdit-py-plugins" }, + { name = "pyyaml" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/69/fbddb50198c6b0901a981e72ae30f1b7769d2dfac88071f7df41c946d133/myst-parser-1.0.0.tar.gz", hash = "sha256:502845659313099542bd38a2ae62f01360e7dd4b1310f025dd014dfc0439cdae", size = 84224 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/1f/1621ef434ac5da26c30d31fcca6d588e3383344902941713640ba717fa87/myst_parser-1.0.0-py3-none-any.whl", hash = "sha256:69fb40a586c6fa68995e6521ac0a525793935db7e724ca9bac1d33be51be9a4c", size = 77312 }, +] + +[[package]] +name = "natsort" +version = "8.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/a9/a0c57aee75f77794adaf35322f8b6404cbd0f89ad45c87197a937764b7d0/natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581", size = 76575 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ef/82/7a9d0550484a62c6da82858ee9419f3dd1ccc9aa1c26a1e43da3ecd20b0d/natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c", size = 38268 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pre-commit" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name 
= "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/b3/4ae08d21eb097162f5aad37f4585f8069a86402ed7f5362cc9ae097f9572/pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32", size = 177079 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/75/526915fedf462e05eeb1c75ceaf7e3f9cde7b5ce6f62740fe5f7f19a0050/pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660", size = 203698 }, +] + +[[package]] +name = "propcache" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/4d/5e5a60b78dbc1d464f8a7bbaeb30957257afdc8512cbb9dfd5659304f5cd/propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70", size = 40951 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/1c/71eec730e12aec6511e702ad0cd73c2872eccb7cad39de8ba3ba9de693ef/propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354", size = 80811 }, + { url = "https://files.pythonhosted.org/packages/89/c3/7e94009f9a4934c48a371632197406a8860b9f08e3f7f7d922ab69e57a41/propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de", size = 46365 }, + { url = "https://files.pythonhosted.org/packages/c0/1d/c700d16d1d6903aeab28372fe9999762f074b80b96a0ccc953175b858743/propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87", size = 45602 }, + { url = "https://files.pythonhosted.org/packages/2e/5e/4a3e96380805bf742712e39a4534689f4cddf5fa2d3a93f22e9fd8001b23/propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016", size = 236161 }, + { url = "https://files.pythonhosted.org/packages/a5/85/90132481183d1436dff6e29f4fa81b891afb6cb89a7306f32ac500a25932/propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb", size = 244938 }, + { url = "https://files.pythonhosted.org/packages/4a/89/c893533cb45c79c970834274e2d0f6d64383ec740be631b6a0a1d2b4ddc0/propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2", size = 243576 }, + { url = "https://files.pythonhosted.org/packages/8c/56/98c2054c8526331a05f205bf45cbb2cda4e58e56df70e76d6a509e5d6ec6/propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4", size = 236011 }, + { url = "https://files.pythonhosted.org/packages/2d/0c/8b8b9f8a6e1abd869c0fa79b907228e7abb966919047d294ef5df0d136cf/propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504", size = 224834 }, + { url = "https://files.pythonhosted.org/packages/18/bb/397d05a7298b7711b90e13108db697732325cafdcd8484c894885c1bf109/propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178", size = 224946 }, + { url = 
"https://files.pythonhosted.org/packages/25/19/4fc08dac19297ac58135c03770b42377be211622fd0147f015f78d47cd31/propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d", size = 217280 }, + { url = "https://files.pythonhosted.org/packages/7e/76/c79276a43df2096ce2aba07ce47576832b1174c0c480fe6b04bd70120e59/propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2", size = 220088 }, + { url = "https://files.pythonhosted.org/packages/c3/9a/8a8cf428a91b1336b883f09c8b884e1734c87f724d74b917129a24fe2093/propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db", size = 233008 }, + { url = "https://files.pythonhosted.org/packages/25/7b/768a8969abd447d5f0f3333df85c6a5d94982a1bc9a89c53c154bf7a8b11/propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b", size = 237719 }, + { url = "https://files.pythonhosted.org/packages/ed/0d/e5d68ccc7976ef8b57d80613ac07bbaf0614d43f4750cf953f0168ef114f/propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b", size = 227729 }, + { url = "https://files.pythonhosted.org/packages/05/64/17eb2796e2d1c3d0c431dc5f40078d7282f4645af0bb4da9097fbb628c6c/propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1", size = 40473 }, + { url = "https://files.pythonhosted.org/packages/83/c5/e89fc428ccdc897ade08cd7605f174c69390147526627a7650fb883e0cd0/propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71", size = 44921 }, + { url = "https://files.pythonhosted.org/packages/7c/46/a41ca1097769fc548fc9216ec4c1471b772cc39720eb47ed7e38ef0006a9/propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2", size = 80800 }, + { url = "https://files.pythonhosted.org/packages/75/4f/93df46aab9cc473498ff56be39b5f6ee1e33529223d7a4d8c0a6101a9ba2/propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7", size = 46443 }, + { url = "https://files.pythonhosted.org/packages/0b/17/308acc6aee65d0f9a8375e36c4807ac6605d1f38074b1581bd4042b9fb37/propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8", size = 45676 }, + { url = "https://files.pythonhosted.org/packages/65/44/626599d2854d6c1d4530b9a05e7ff2ee22b790358334b475ed7c89f7d625/propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793", size = 246191 }, + { url = "https://files.pythonhosted.org/packages/f2/df/5d996d7cb18df076debae7d76ac3da085c0575a9f2be6b1f707fe227b54c/propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09", size = 251791 }, + { url = "https://files.pythonhosted.org/packages/2e/6d/9f91e5dde8b1f662f6dd4dff36098ed22a1ef4e08e1316f05f4758f1576c/propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89", size = 
253434 }, + { url = "https://files.pythonhosted.org/packages/3c/e9/1b54b7e26f50b3e0497cd13d3483d781d284452c2c50dd2a615a92a087a3/propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e", size = 248150 }, + { url = "https://files.pythonhosted.org/packages/a7/ef/a35bf191c8038fe3ce9a414b907371c81d102384eda5dbafe6f4dce0cf9b/propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9", size = 233568 }, + { url = "https://files.pythonhosted.org/packages/97/d9/d00bb9277a9165a5e6d60f2142cd1a38a750045c9c12e47ae087f686d781/propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4", size = 229874 }, + { url = "https://files.pythonhosted.org/packages/8e/78/c123cf22469bdc4b18efb78893e69c70a8b16de88e6160b69ca6bdd88b5d/propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c", size = 225857 }, + { url = "https://files.pythonhosted.org/packages/31/1b/fd6b2f1f36d028820d35475be78859d8c89c8f091ad30e377ac49fd66359/propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887", size = 227604 }, + { url = "https://files.pythonhosted.org/packages/99/36/b07be976edf77a07233ba712e53262937625af02154353171716894a86a6/propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57", size = 238430 }, + { url = "https://files.pythonhosted.org/packages/0d/64/5822f496c9010e3966e934a011ac08cac8734561842bc7c1f65586e0683c/propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23", size = 244814 }, + { url = "https://files.pythonhosted.org/packages/fd/bd/8657918a35d50b18a9e4d78a5df7b6c82a637a311ab20851eef4326305c1/propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348", size = 235922 }, + { url = "https://files.pythonhosted.org/packages/a8/6f/ec0095e1647b4727db945213a9f395b1103c442ef65e54c62e92a72a3f75/propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5", size = 40177 }, + { url = "https://files.pythonhosted.org/packages/20/a2/bd0896fdc4f4c1db46d9bc361c8c79a9bf08ccc08ba054a98e38e7ba1557/propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3", size = 44446 }, + { url = "https://files.pythonhosted.org/packages/a8/a7/5f37b69197d4f558bfef5b4bceaff7c43cc9b51adf5bd75e9081d7ea80e4/propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7", size = 78120 }, + { url = "https://files.pythonhosted.org/packages/c8/cd/48ab2b30a6b353ecb95a244915f85756d74f815862eb2ecc7a518d565b48/propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763", size = 45127 }, + { url = "https://files.pythonhosted.org/packages/a5/ba/0a1ef94a3412aab057bd996ed5f0ac7458be5bf469e85c70fa9ceb43290b/propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d", size = 44419 }, + { url = "https://files.pythonhosted.org/packages/b4/6c/ca70bee4f22fa99eacd04f4d2f1699be9d13538ccf22b3169a61c60a27fa/propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a", size = 229611 }, + { url = "https://files.pythonhosted.org/packages/19/70/47b872a263e8511ca33718d96a10c17d3c853aefadeb86dc26e8421184b9/propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b", size = 234005 }, + { url = "https://files.pythonhosted.org/packages/4f/be/3b0ab8c84a22e4a3224719099c1229ddfdd8a6a1558cf75cb55ee1e35c25/propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb", size = 237270 }, + { url = "https://files.pythonhosted.org/packages/04/d8/f071bb000d4b8f851d312c3c75701e586b3f643fe14a2e3409b1b9ab3936/propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf", size = 231877 }, + { url = "https://files.pythonhosted.org/packages/93/e7/57a035a1359e542bbb0a7df95aad6b9871ebee6dce2840cb157a415bd1f3/propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2", size = 217848 }, + { url = "https://files.pythonhosted.org/packages/f0/93/d1dea40f112ec183398fb6c42fde340edd7bab202411c4aa1a8289f461b6/propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f", size = 216987 }, + { url = "https://files.pythonhosted.org/packages/62/4c/877340871251145d3522c2b5d25c16a1690ad655fbab7bb9ece6b117e39f/propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136", size = 212451 }, + { url = "https://files.pythonhosted.org/packages/7c/bb/a91b72efeeb42906ef58ccf0cdb87947b54d7475fee3c93425d732f16a61/propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325", size = 212879 }, + { url = "https://files.pythonhosted.org/packages/9b/7f/ee7fea8faac57b3ec5d91ff47470c6c5d40d7f15d0b1fccac806348fa59e/propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44", size = 222288 }, + { url = "https://files.pythonhosted.org/packages/ff/d7/acd67901c43d2e6b20a7a973d9d5fd543c6e277af29b1eb0e1f7bd7ca7d2/propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83", size = 228257 }, + { url = "https://files.pythonhosted.org/packages/8d/6f/6272ecc7a8daad1d0754cfc6c8846076a8cb13f810005c79b15ce0ef0cf2/propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544", size = 221075 }, + { url = "https://files.pythonhosted.org/packages/7c/bd/c7a6a719a6b3dd8b3aeadb3675b5783983529e4a3185946aa444d3e078f6/propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032", size = 39654 }, + { url = 
"https://files.pythonhosted.org/packages/88/e7/0eef39eff84fa3e001b44de0bd41c7c0e3432e7648ffd3d64955910f002d/propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e", size = 43705 }, + { url = "https://files.pythonhosted.org/packages/3d/b6/e6d98278f2d49b22b4d033c9f792eda783b9ab2094b041f013fc69bcde87/propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036", size = 11603 }, +] + +[[package]] +name = "psutil" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 }, + { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 }, + { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 }, + { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 }, + { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 }, + { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 }, + { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 }, +] + +[[package]] +name = "pycodestyle" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/b3/c832123f2699892c715fcdfebb1a8fdeffa11bb7b2350e46ecdd76b45a20/pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef", size = 103640 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/cc/227251b1471f129bc35e966bb0fceb005969023926d744139642d847b7ae/pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", size = 41725 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/ab/67eda485b025e9253cce0eaede9b6158a08f62af7013a883b2c8775917b2/pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6", size = 349141 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/42/47f16b1d3f6cf2c3a8e6b8c63680d38d25144803426eb893665a6384bedd/pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7", size = 2832515 }, + { url = "https://files.pythonhosted.org/packages/4b/44/439860148466c6a541a2916fc379a5730b16ef3c7d433e30a6041d36d7bb/pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b", size = 2497474 }, + { url = "https://files.pythonhosted.org/packages/1d/99/128bae7beff5cd5636f41a49bc1f58e5aaeb186d5f674ae4b2eb88608127/pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663", size = 3077188 }, + { url = "https://files.pythonhosted.org/packages/da/dd/dff4860e552dbf84b8525e291617408a3ee32024f93147e74a989b5977ee/pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f", size = 3109064 }, + { url = "https://files.pythonhosted.org/packages/4b/75/56c04c68c364cdb6dbb534b5f4ef032b802a892841070f8139bd6f6c9935/pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046", size = 3153833 }, + { url = "https://files.pythonhosted.org/packages/c9/79/f25ee40671ddf76219d38bea0bdee63bdae09cb89cff61cb67c04db58ffd/pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca", size = 3103044 }, + { url = "https://files.pythonhosted.org/packages/18/9c/c84ead4e65e85dbb3b9806e8390db91b82993f5248fdfe9dacdd4da9c726/pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f", size = 2110889 }, + { url = "https://files.pythonhosted.org/packages/b6/5d/4ec16c2158b934ce2b082073cea5e90bbdb76172050dc565425a0a76beec/pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c", size = 158858 }, +] + +[[package]] +name = "pyflakes" +version = "2.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/0f/0dc480da9162749bf629dca76570972dd9cce5bedc60196a3c912875c87d/pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db", size = 68567 } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/6c/11/2a745612f1d3cbbd9c69ba14b1b43a35a2f5c3c81cd0124508c52c64307f/pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3", size = 68805 }, +] + +[[package]] +name = "pygments" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, +] + +[[package]] +name = "pyjwt" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/68/ce067f09fca4abeca8771fe667d89cc347d1e99da3e093112ac329c6020e/pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c", size = 78825 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/84/0fdf9b18ba31d69877bd39c9cd6052b47f3761e9910c15de788e519f079f/PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850", size = 22344 }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pyproject-api" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/19/441e0624a8afedd15bbcce96df1b80479dd0ff0d965f5ce8fde4f2f6ffad/pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496", size = 22340 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/f4/3c4ddfcc0c19c217c6de513842d286de8021af2f2ab79bbb86c00342d778/pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228", size = 13100 }, +] + +[[package]] +name = "pytest" +version = "8.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 }, +] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990 }, +] + +[[package]] +name = "pytest-random-order" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/e5/89654b4354b10e89969a74130f391b017dbdc113ce27f0e8ff9fa23e44e1/pytest-random-order-1.1.1.tar.gz", hash = "sha256:4472d7d34f1f1c5f3a359c4ffc5c13ed065232f31eca19c8844c1ab406e79080", size = 14626 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/02/944cf846bcd6027a1805c69fec90581f916e99ccafcbe409ae6c76833255/pytest_random_order-1.1.1-py3-none-any.whl", hash = "sha256:882727a8b597ecd06ede28654ffeb8a6d511a1e4abe1054cca7982f2e42008cd", size = 11521 }, +] + +[[package]] +name = "pytest-vcr" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "vcrpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/60/104c619483c1a42775d3f8b27293f1ecfc0728014874d065e68cb9702d49/pytest-vcr-1.0.2.tar.gz", hash = "sha256:23ee51b75abbcc43d926272773aae4f39f93aceb75ed56852d0bf618f92e1896", size = 3810 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/d3/ff520d11e6ee400602711d1ece8168dcfc5b6d8146fb7db4244a6ad6a9c3/pytest_vcr-1.0.2-py2.py3-none-any.whl", hash = "sha256:2f316e0539399bea0296e8b8401145c62b6f85e9066af7e57b6151481b0d6d9c", size = 4137 }, +] + +[[package]] +name = "python-baseconv" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/33/d0/9297d7d8dd74767b4d5560d834b30b2fff17d39987c23ed8656f476e0d9b/python-baseconv-1.2.2.tar.gz", hash = "sha256:0539f8bd0464013b05ad62e0a1673f0ac9086c76b43ebf9f833053527cd9931b", size = 4929 } + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/e5/af35f7ea75cf72f2cd079c95ee16797de7cd71f29ea7c68ae5ce7be1eda0/PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", size = 125201 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/0d/26fb23e8863e0aeaac0c64e03fd27367ad2ae3f3cccf3798ee98ce160368/PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", size = 187867 }, + { url = "https://files.pythonhosted.org/packages/28/09/55f715ddbf95a054b764b547f617e22f1d5e45d83905660e9a088078fe67/PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", size = 167530 }, + { url = "https://files.pythonhosted.org/packages/5e/94/7d5ee059dfb92ca9e62f4057dcdec9ac08a9e42679644854dc01177f8145/PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", size = 732244 }, + { url = "https://files.pythonhosted.org/packages/06/92/e0224aa6ebf9dc54a06a4609da37da40bb08d126f5535d81bff6b417b2ae/PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", size = 752871 }, + { url = "https://files.pythonhosted.org/packages/7b/5e/efd033ab7199a0b2044dab3b9f7a4f6670e6a52c089de572e928d2873b06/PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", size = 757729 }, + { url = "https://files.pythonhosted.org/packages/03/5c/c4671451b2f1d76ebe352c0945d4cd13500adb5d05f5a51ee296d80152f7/PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", size = 748528 }, + { url = "https://files.pythonhosted.org/packages/73/9c/766e78d1efc0d1fca637a6b62cea1b4510a7fb93617eb805223294fef681/PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", size = 130286 }, + { url = "https://files.pythonhosted.org/packages/b3/34/65bb4b2d7908044963ebf614fe0fdb080773fc7030d7e39c8d3eddcd4257/PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", size = 144699 }, + { url = "https://files.pythonhosted.org/packages/bc/06/1b305bf6aa704343be85444c9d011f626c763abb40c0edc1cad13bfd7f86/PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", size = 178692 }, + { url = "https://files.pythonhosted.org/packages/84/02/404de95ced348b73dd84f70e15a41843d817ff8c1744516bf78358f2ffd2/PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", size = 165622 }, + { url = "https://files.pythonhosted.org/packages/c7/4c/4a2908632fc980da6d918b9de9c1d9d7d7e70b2672b1ad5166ed27841ef7/PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef", size = 696937 }, + { url = "https://files.pythonhosted.org/packages/b4/33/720548182ffa8344418126017aa1d4ab4aeec9a2275f04ce3f3573d8ace8/PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", size = 724969 }, + { url = "https://files.pythonhosted.org/packages/4f/78/77b40157b6cb5f2d3d31a3d9b2efd1ba3505371f76730d267e8b32cf4b7f/PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", size = 712604 }, + { url = "https://files.pythonhosted.org/packages/2e/97/3e0e089ee85e840f4b15bfa00e4e63d84a3691ababbfea92d6f820ea6f21/PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", size = 126098 }, + { url = "https://files.pythonhosted.org/packages/2b/9f/fbade56564ad486809c27b322d0f7e6a89c01f6b4fe208402e90d4443a99/PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", size = 138675 }, +] + +[[package]] +name = "referencing" +version = "0.35.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 }, +] + +[[package]] +name = "requests" +version = "2.29.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4c/d2/70fc708727b62d55bc24e43cc85f073039023212d482553d853c44e57bdb/requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059", size = 108279 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/e1/2aa539876d9ed0ddc95882451deb57cfd7aa8dbf0b8dbce68e045549ba56/requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b", size = 62499 }, +] + +[[package]] +name = "requests-futures" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/07/9140eb28a74f5ee0f256b8c99981f6d21f9f60af5721ca694176fd080687/requests-futures-1.0.1.tar.gz", hash = "sha256:f55a4ef80070e2858e7d1e73123d2bfaeaf25b93fd34384d8ddf148e2b676373", size = 9921 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/ca/ca664ed374fab67535482532c3c05bb5dbe5850e7dff2491eb827c318e48/requests_futures-1.0.1-py2.py3-none-any.whl", hash = 
"sha256:4a2f5472e9911a79532137d156aa937cd9cd90fec55677f71b2976d1f7a66d38", size = 7597 }, +] + +[[package]] +name = "responses" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "requests" }, + { name = "types-pyyaml" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fa/4f/5033bf66528c832e7fcc48e76f540bf401302c55041c7fb488b4fbaaec4a/responses-0.23.1.tar.gz", hash = "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f", size = 72966 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/6a/64c85e69c6a7b02e828ed193b2fc15e3ff6581f87501666b98feabc54809/responses-0.23.1-py3-none-any.whl", hash = "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd", size = 52083 }, +] + +[[package]] +name = "rich" +version = "13.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, +] + +[[package]] +name = "robotframework" +version = "7.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/85/824b95cd3fcaf0eb6c353481b415871da4186e6414ba06a99772a48b960e/robotframework-7.1.1.zip", hash = "sha256:f85919c68c4d0837006e5f09dde1ef689f082eba2e7e64d5758753f9ee8bfea9", size = 761336 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/3c/a1f0971f4405c5accea879e84be91fb98956d778ff1cfc232410fc8558ae/robotframework-7.1.1-py3-none-any.whl", hash = "sha256:0461360be00dfb8ce1ab3f42370fa6eea3779e41c0b8d79a1f8ddcd2ec8e3679", size = 730648 }, +] + +[[package]] +name = "robotframework-pabot" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "natsort" }, + { name = "robotframework" }, + { name = "robotframework-stacktrace" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f2/66/6e6905d688e72f5c7b8a596d75940cae6572dfae8f1000d7ae3bb64d68ce/robotframework-pabot-2.18.0.tar.gz", hash = "sha256:3d870d98156cecd81f9a8d88deaa2174aac808d81ca1c11c561a817b0dbaa404", size = 46505 } + +[[package]] +name = "robotframework-pythonlibcore" +version = "4.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/89/5dc8c8186c897ee4b7d0b2631ebc90e679e8c8f04ea85505f96ad38aad64/robotframework-pythonlibcore-4.4.1.tar.gz", hash = "sha256:2d695b2ea906f5815179643e29182466675e682d82c5fa9d1009edfae2f84b16", size = 12835 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/64/47d8403c7c0af89b46461640a2f67e49a5778062b8dd6eb3e128aa3c50cc/robotframework_pythonlibcore-4.4.1-py2.py3-none-any.whl", hash = "sha256:e0517129522aaa039eb2a28fd3d9720b7a0be0b90d0cbcb153a6c8016bb9e973", size = 12452 }, +] + +[[package]] +name = "robotframework-requests" +version = "0.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "robotframework" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/61/e2/b17b940985e7b35f53767d908897870fcf4e143a2a7c2da76d152e4abc4c/robotframework-requests-0.9.7.tar.gz", hash = "sha256:c2a2839813e1dc6b299e7d336314c9982c225c5b7e001ec893dc3555c6a95740", size = 19404 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/18/03ef4d1132f50b55220af5308db91e1cc5481b8b063cac5fafa625b00f64/robotframework_requests-0.9.7-py3-none-any.whl", hash = "sha256:96315066318778cbcf5523cdb6175f5a0b8fec33275030a20dade3a3d98aeca2", size = 21055 }, +] + +[[package]] +name = "robotframework-seleniumlibrary" +version = "5.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "robotframework" }, + { name = "robotframework-pythonlibcore" }, + { name = "selenium" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/75/fe0184ba697a585d80457b74b7bed1bb290501cd6f9883d149efb4a3d9f2/robotframework-seleniumlibrary-5.1.3.tar.gz", hash = "sha256:f51a0068c6c0d8107ee1120874a3afbf2bbe751fd0782cb86a27a616d9ca30b6", size = 156935 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/d4/ca1de2d557d16fdb56916c6168d2bfccff9debc0f9823ba1f6ecdf0d5e53/robotframework_seleniumlibrary-5.1.3-py2.py3-none-any.whl", hash = "sha256:7c8211b870249db53dbb2091a5a36c09aa657f06405d112587d37d33fff7454e", size = 94652 }, +] + +[[package]] +name = "robotframework-stacktrace" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "robotframework" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/cf/6e6934c3d037ef3f5914e88494127ec4d0fea73bd566539e08b9fa2c9324/robotframework-stacktrace-0.4.1.tar.gz", hash = "sha256:e96cb36e7e9ab55104c1f7d3606249a109e0a4c3bb6a0e294bff07d54ee6f6a5", size = 12634 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/f2/be7d680eb8a23e86ea288f6c421e1e8c30c823a8f1521dc6b9f9d7b7692b/robotframework_stacktrace-0.4.1-py3-none-any.whl", hash = "sha256:018d7a55b99733e64e3cc0b134771b61a47de61de23609ed35c7bf0a53e9290e", size = 8543 }, +] + +[[package]] +name = "rpds-py" +version = "0.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/cb/8e919951f55d109d658f81c9b49d0cc3b48637c50792c5d2e77032b8c5da/rpds_py-0.20.1.tar.gz", hash = "sha256:e1791c4aabd117653530dccd24108fa03cc6baf21f58b950d0a73c3b3b29a350", size = 25931 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/2e/a6ded84019a05b8f23e0fe6a632f62ae438a8c5e5932d3dfc90c73418414/rpds_py-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:762703bdd2b30983c1d9e62b4c88664df4a8a4d5ec0e9253b0231171f18f6d75", size = 327194 }, + { url = "https://files.pythonhosted.org/packages/68/11/d3f84c69de2b2086be3d6bd5e9d172825c096b13842ab7e5f8f39f06035b/rpds_py-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b581f47257a9fce535c4567782a8976002d6b8afa2c39ff616edf87cbeff712", size = 318126 }, + { url = "https://files.pythonhosted.org/packages/18/c0/13f1bce9c901511e5e4c0b77a99dbb946bb9a177ca88c6b480e9cb53e304/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:842c19a6ce894493563c3bd00d81d5100e8e57d70209e84d5491940fdb8b9e3a", size = 361119 }, + { url = "https://files.pythonhosted.org/packages/06/31/3bd721575671f22a37476c2d7b9e34bfa5185bdcee09f7fedde3b29f3adb/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42cbde7789f5c0bcd6816cb29808e36c01b960fb5d29f11e052215aa85497c93", size = 369532 }, + { url = 
"https://files.pythonhosted.org/packages/20/22/3eeb0385f33251b4fd0f728e6a3801dc8acc05e714eb7867cefe635bf4ab/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c8e9340ce5a52f95fa7d3b552b35c7e8f3874d74a03a8a69279fd5fca5dc751", size = 403703 }, + { url = "https://files.pythonhosted.org/packages/10/e1/8dde6174e7ac5b9acd3269afca2e17719bc7e5088c68f44874d2ad9e4560/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba6f89cac95c0900d932c9efb7f0fb6ca47f6687feec41abcb1bd5e2bd45535", size = 429868 }, + { url = "https://files.pythonhosted.org/packages/19/51/a3cc1a5238acfc2582033e8934d034301f9d4931b9bf7c7ccfabc4ca0880/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a916087371afd9648e1962e67403c53f9c49ca47b9680adbeef79da3a7811b0", size = 360539 }, + { url = "https://files.pythonhosted.org/packages/cd/8c/3c87471a44bd4114e2b0aec90f298f6caaac4e8db6af904d5dd2279f5c61/rpds_py-0.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:200a23239781f46149e6a415f1e870c5ef1e712939fe8fa63035cd053ac2638e", size = 382467 }, + { url = "https://files.pythonhosted.org/packages/d0/9b/95073fe3e0f130e6d561e106818b6568ef1f2df3352e7f162ab912da837c/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58b1d5dd591973d426cbb2da5e27ba0339209832b2f3315928c9790e13f159e8", size = 546669 }, + { url = "https://files.pythonhosted.org/packages/de/4c/7ab3669e02bb06fedebcfd64d361b7168ba39dfdf385e4109440f2e7927b/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6b73c67850ca7cae0f6c56f71e356d7e9fa25958d3e18a64927c2d930859b8e4", size = 549304 }, + { url = "https://files.pythonhosted.org/packages/f1/e8/ad5da336cd42adbdafe0ecd40dcecdae01fd3d703c621c7637615a008d3a/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8761c3c891cc51e90bc9926d6d2f59b27beaf86c74622c8979380a29cc23ac3", size = 527637 }, + { url = "https://files.pythonhosted.org/packages/02/f1/1b47b9e5b941c2659c9b7e4ef41b6f07385a6500c638fa10c066e4616ecb/rpds_py-0.20.1-cp311-none-win32.whl", hash = "sha256:cd945871335a639275eee904caef90041568ce3b42f402c6959b460d25ae8732", size = 200488 }, + { url = "https://files.pythonhosted.org/packages/85/f6/c751c1adfa31610055acfa1cc667cf2c2d7011a73070679c448cf5856905/rpds_py-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:7e21b7031e17c6b0e445f42ccc77f79a97e2687023c5746bfb7a9e45e0921b84", size = 218475 }, + { url = "https://files.pythonhosted.org/packages/e7/10/4e8dcc08b58a548098dbcee67a4888751a25be7a6dde0a83d4300df48bfa/rpds_py-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:36785be22066966a27348444b40389f8444671630063edfb1a2eb04318721e17", size = 329749 }, + { url = "https://files.pythonhosted.org/packages/d2/e4/61144f3790e12fd89e6153d77f7915ad26779735fef8ee9c099cba6dfb4a/rpds_py-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:142c0a5124d9bd0e2976089484af5c74f47bd3298f2ed651ef54ea728d2ea42c", size = 321032 }, + { url = "https://files.pythonhosted.org/packages/fa/e0/99205aabbf3be29ef6c58ef9b08feed51ba6532fdd47461245cb58dd9897/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbddc10776ca7ebf2a299c41a4dde8ea0d8e3547bfd731cb87af2e8f5bf8962d", size = 363931 }, + { url = "https://files.pythonhosted.org/packages/ac/bd/bce2dddb518b13a7e77eed4be234c9af0c9c6d403d01c5e6ae8eb447ab62/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:15a842bb369e00295392e7ce192de9dcbf136954614124a667f9f9f17d6a216f", size = 373343 }, + { url = "https://files.pythonhosted.org/packages/43/15/112b7c553066cb91264691ba7fb119579c440a0ae889da222fa6fc0d411a/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be5ef2f1fc586a7372bfc355986226484e06d1dc4f9402539872c8bb99e34b01", size = 406304 }, + { url = "https://files.pythonhosted.org/packages/af/8d/2da52aef8ae5494a382b0c0025ba5b68f2952db0f2a4c7534580e8ca83cc/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbcf360c9e3399b056a238523146ea77eeb2a596ce263b8814c900263e46031a", size = 423022 }, + { url = "https://files.pythonhosted.org/packages/c8/1b/f23015cb293927c93bdb4b94a48bfe77ad9d57359c75db51f0ff0cf482ff/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd27a66740ffd621d20b9a2f2b5ee4129a56e27bfb9458a3bcc2e45794c96cb", size = 364937 }, + { url = "https://files.pythonhosted.org/packages/7b/8b/6da8636b2ea2e2f709e56656e663b6a71ecd9a9f9d9dc21488aade122026/rpds_py-0.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0b937b2a1988f184a3e9e577adaa8aede21ec0b38320d6009e02bd026db04fa", size = 386301 }, + { url = "https://files.pythonhosted.org/packages/20/af/2ae192797bffd0d6d558145b5a36e7245346ff3e44f6ddcb82f0eb8512d4/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6889469bfdc1eddf489729b471303739bf04555bb151fe8875931f8564309afc", size = 549452 }, + { url = "https://files.pythonhosted.org/packages/07/dd/9f6520712a5108cd7d407c9db44a3d59011b385c58e320d58ebf67757a9e/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19b73643c802f4eaf13d97f7855d0fb527fbc92ab7013c4ad0e13a6ae0ed23bd", size = 554370 }, + { url = "https://files.pythonhosted.org/packages/5e/0e/b1bdc7ea0db0946d640ab8965146099093391bb5d265832994c47461e3c5/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c6afcf2338e7f374e8edc765c79fbcb4061d02b15dd5f8f314a4af2bdc7feb5", size = 530940 }, + { url = "https://files.pythonhosted.org/packages/ae/d3/ffe907084299484fab60a7955f7c0e8a295c04249090218c59437010f9f4/rpds_py-0.20.1-cp312-none-win32.whl", hash = "sha256:dc73505153798c6f74854aba69cc75953888cf9866465196889c7cdd351e720c", size = 203164 }, + { url = "https://files.pythonhosted.org/packages/1f/ba/9cbb57423c4bfbd81c473913bebaed151ad4158ee2590a4e4b3e70238b48/rpds_py-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:8bbe951244a838a51289ee53a6bae3a07f26d4e179b96fc7ddd3301caf0518eb", size = 220750 }, + { url = "https://files.pythonhosted.org/packages/b5/01/fee2e1d1274c92fff04aa47d805a28d62c2aa971d1f49f5baea1c6e670d9/rpds_py-0.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6ca91093a4a8da4afae7fe6a222c3b53ee4eef433ebfee4d54978a103435159e", size = 329359 }, + { url = "https://files.pythonhosted.org/packages/b0/cf/4aeffb02b7090029d7aeecbffb9a10e1c80f6f56d7e9a30e15481dc4099c/rpds_py-0.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b9c2fe36d1f758b28121bef29ed1dee9b7a2453e997528e7d1ac99b94892527c", size = 320543 }, + { url = "https://files.pythonhosted.org/packages/17/69/85cf3429e9ccda684ba63ff36b5866d5f9451e921cc99819341e19880334/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f009c69bc8c53db5dfab72ac760895dc1f2bc1b62ab7408b253c8d1ec52459fc", size = 363107 }, + { url = 
"https://files.pythonhosted.org/packages/ef/de/7df88dea9c3eeb832196d23b41f0f6fc5f9a2ee9b2080bbb1db8731ead9c/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6740a3e8d43a32629bb9b009017ea5b9e713b7210ba48ac8d4cb6d99d86c8ee8", size = 372027 }, + { url = "https://files.pythonhosted.org/packages/d1/b8/88675399d2038580743c570a809c43a900e7090edc6553f8ffb66b23c965/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32b922e13d4c0080d03e7b62991ad7f5007d9cd74e239c4b16bc85ae8b70252d", size = 405031 }, + { url = "https://files.pythonhosted.org/packages/e1/aa/cca639f6d17caf00bab51bdc70fcc0bdda3063e5662665c4fdf60443c474/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe00a9057d100e69b4ae4a094203a708d65b0f345ed546fdef86498bf5390982", size = 422271 }, + { url = "https://files.pythonhosted.org/packages/c4/07/bf8a949d2ec4626c285579c9d6b356c692325f1a4126e947736b416e1fc4/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fe9b04b6fa685bd39237d45fad89ba19e9163a1ccaa16611a812e682913496", size = 363625 }, + { url = "https://files.pythonhosted.org/packages/11/f0/06675c6a58d6ce34547879138810eb9aab0c10e5607ea6c2e4dc56b703c8/rpds_py-0.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa7ac11e294304e615b43f8c441fee5d40094275ed7311f3420d805fde9b07b4", size = 385906 }, + { url = "https://files.pythonhosted.org/packages/bf/ac/2d1f50374eb8e41030fad4e87f81751e1c39e3b5d4bee8c5618830d8a6ac/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aa97af1558a9bef4025f8f5d8c60d712e0a3b13a2fe875511defc6ee77a1ab7", size = 549021 }, + { url = "https://files.pythonhosted.org/packages/f7/d4/a7d70a7cc71df772eeadf4bce05e32e780a9fe44a511a5b091c7a85cb767/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:483b29f6f7ffa6af845107d4efe2e3fa8fb2693de8657bc1849f674296ff6a5a", size = 553800 }, + { url = "https://files.pythonhosted.org/packages/87/81/dc30bc449ccba63ad23a0f6633486d4e0e6955f45f3715a130dacabd6ad0/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37fe0f12aebb6a0e3e17bb4cd356b1286d2d18d2e93b2d39fe647138458b4bcb", size = 531076 }, + { url = "https://files.pythonhosted.org/packages/50/80/fb62ab48f3b5cfe704ead6ad372da1922ddaa76397055e02eb507054c979/rpds_py-0.20.1-cp313-none-win32.whl", hash = "sha256:a624cc00ef2158e04188df5e3016385b9353638139a06fb77057b3498f794782", size = 202804 }, + { url = "https://files.pythonhosted.org/packages/d9/30/a3391e76d0b3313f33bdedd394a519decae3a953d2943e3dabf80ae32447/rpds_py-0.20.1-cp313-none-win_amd64.whl", hash = "sha256:b71b8666eeea69d6363248822078c075bac6ed135faa9216aa85f295ff009b1e", size = 220502 }, +] + +[[package]] +name = "rst2ansi" +version = "0.1.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/19/b29bc04524e7d1dbde13272fbb67e45a8eb24bb6d112cf10c46162b350d7/rst2ansi-0.1.5.tar.gz", hash = "sha256:1b17fb9a628d40f57933ad1a3aa952346444be069469508e73e95060da33fe6f", size = 9989 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/cb/fdb216f2b8bbec9c43655a79f2f280b2ba7822b2c8396ecceafa0c232320/rst2ansi-0.1.5-py3-none-any.whl", hash = "sha256:b2cf192e38975918d07540bba7d673550cd7d28ca7443410984e22d5ab058fb3", size = 18414 }, +] + +[[package]] +name = "salesforce-bulk" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = 
"simple-salesforce" }, + { name = "six" }, + { name = "unicodecsv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/45/ae6a5566997e180755b0e02dac3374ae10071f44300c917a3958a41d324a/salesforce-bulk-2.2.0.tar.gz", hash = "sha256:6894e2f0d1b7a719388bbc425e1874cc096a3cc80106e93098a672709ac5ff4e", size = 12305 } + +[[package]] +name = "sarge" +version = "0.1.7.post1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/03/937f1f848ffe04c48e2dd0bd6c93da1a583d58695bb74a8957650ea6f0d0/sarge-0.1.7.post1.tar.gz", hash = "sha256:64ff42ae6ef90acbded6318ed440ed63b31a669302fb60cf41265debea282a3d", size = 25736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/fd/691a7a847559747d122a25f454b6dc4eb2a83c23ba44d161a1fdff5ede92/sarge-0.1.7.post1-py2.py3-none-any.whl", hash = "sha256:6da81592eac3fdb55708baddaf28deaad3a18f8719e3c082ea3b0405647ae72c", size = 18506 }, +] + +[[package]] +name = "secretstorage" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/a4/f48c9d79cb507ed1373477dbceaba7401fd8a23af63b837fa61f1dcd3691/SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", size = 19739 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99", size = 15221 }, +] + +[[package]] +name = "selenium" +version = "3.141.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/9c/9030520bf6ff0b4c98988448a93c04fcbd5b13cd9520074d8ed53569ccfe/selenium-3.141.0.tar.gz", hash = "sha256:deaf32b60ad91a4611b98d8002757f29e6f2c2d5fcaf202e1c9ad06d6772300d", size = 854669 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/d6/4294f0b4bce4de0abf13e17190289f9d0613b0a44e5dd6a7f5ca98459853/selenium-3.141.0-py2.py3-none-any.whl", hash = "sha256:2d7131d7bc5a5b99a2d9b04aaf2612c411b03b8ca1b1ee8d3de5845a9be2cb3c", size = 904577 }, +] + +[[package]] +name = "setuptools" +version = "75.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/73/c1ccf3e057ef6331cc6861412905dc218203bde46dfe8262c1631aa7fb11/setuptools-75.4.0.tar.gz", hash = "sha256:1dc484f5cf56fd3fe7216d7b8df820802e7246cfb534a1db2aa64f14fcb9cdcb", size = 1336593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/df/7c6bb83dcb45b35dc35b310d752f254211cde0bcd2a35290ea6e2862b2a9/setuptools-75.4.0-py3-none-any.whl", hash = "sha256:b3c5d862f98500b06ffdf7cc4499b48c46c317d8d56cb30b5c8bce4d88f5c216", size = 1223131 }, +] + +[[package]] +name = "simple-salesforce" +version = "1.11.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "authlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/15/a8/9f3832c5229df89e115de112f57ae2d6b567ec47a885cce87c752f453423/simple-salesforce-1.11.4.tar.gz", hash = "sha256:3768fe40d04daa74409acccd9934fcf833697c6b239d9bf52d7f87a99efbe41e", size = 33101 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d2/84/777a88fc20615a0e42af2853f156fb436f09d0cdad64be584afa2312a8f4/simple_salesforce-1.11.4-py2.py3-none-any.whl", hash = "sha256:fbfa2940070007853d4ad437ac6064bfa55b20750f28e360cae72597450c36e5", size = 30314 }, +] + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 }, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 }, +] + +[[package]] +name = "snowfakery" +version = "4.0.0" +source = { directory = "../Snowfakery" } +dependencies = [ + { name = "click" }, + { name = "faker" }, + { name = "faker-edu" }, + { name = "faker-nonprofit" }, + { name = "gvgen" }, + { name = "jinja2" }, + { name = "pydantic" }, + { name = "python-baseconv" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "setuptools" }, + { name = "sqlalchemy" }, +] + +[package.metadata] +requires-dist = [ + { name = "click" }, + { name = "faker" }, + { name = "faker-edu" }, + { name = "faker-nonprofit" }, + { name = "gvgen" }, + { name = "jinja2" }, + { name = "pydantic", specifier = "<2.0.0" }, + { name = "python-baseconv" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "setuptools", specifier = ">=75.4.0" }, + { name = "sqlalchemy", specifier = "<3" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "black", specifier = ">=24.10.0" }, + { name = "coverage", specifier = ">=7.6.4" }, + { name = "coveralls", specifier = ">=4.0.1" }, + { name = "diff-cover", specifier = ">=9.2.0" }, + { name = "faker-microservice", specifier = ">=2.0.0" }, + { name = "jsonschema", specifier = ">=4.23.0" }, + { name = "mkdocs", specifier = ">=1.6.1" }, + { name = "mkdocs-exclude-search", specifier = ">=0.6.6" }, + { name = "pre-commit", specifier = ">=4.0.1" }, + { name = "pyright", specifier = ">=1.1.388" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "pytest-cov", specifier = ">=6.0.0" }, + { name = "pytest-vcr", specifier = ">=1.0.2" }, + { name = "responses", specifier = ">=0.23.1" }, + { name = "vcrpy", specifier = ">=6.0.2" }, +] + +[[package]] +name = "soupsieve" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, +] + +[[package]] +name = "sphinx" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster" }, + { name = "babel" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils" }, + { name = "imagesize" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "requests" }, + { name = "snowballstemmer" }, + { name = "sphinxcontrib-applehelp" }, + { name = "sphinxcontrib-devhelp" }, + { name = "sphinxcontrib-htmlhelp" }, + { name = "sphinxcontrib-jsmath" }, + { name = "sphinxcontrib-qthelp" }, + { name = "sphinxcontrib-serializinghtml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/b2/02a43597980903483fe5eb081ee8e0ba2bb62ea43a70499484343795f3bf/Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5", size = 6811365 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/a7/01dd6fd9653c056258d65032aa09a615b5d7b07dd840845a9f41a8860fbc/sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d", size = 3183160 }, +] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496 }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/32/df/45e827f4d7e7fcc84e853bcef1d836effd762d63ccb86f43ede4e98b478c/sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e", size = 24766 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/c1/5e2cafbd03105ce50d8500f9b4e8a6e8d02e22d0475b574c3b3e9451a15f/sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228", size = 120601 }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/33/dc28393f16385f722c893cb55539c641c9aaec8d1bc1c15b69ce0ac2dbb3/sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4", size = 17398 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/09/5de5ed43a521387f18bdf5f5af31d099605c992fd25372b2b9b825ce48ee/sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", size = 84690 }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/b3/47/64cff68ea3aa450c373301e5bebfbb9fce0a3e70aca245fcadd4af06cd75/sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff", size = 27967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/ee/a1f5e39046cbb5f8bc8fba87d1ddf1c6643fbc9194e58d26e606de4b9074/sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903", size = 99833 }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071 }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/8e/c4846e59f38a5f2b4a0e3b27af38f2fcf904d4bfd82095bf92de0b114ebd/sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", size = 21658 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2b/14/05f9206cf4e9cfca1afb5fd224c7cd434dcc3a433d6d9e4e0264d29c6cdb/sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6", size = 90609 }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/72/835d6fadb9e5d02304cf39b18f93d227cd93abd3c41ebf58e6853eeb1455/sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952", size = 21019 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/77/5464ec50dd0f1c1037e3c93249b040c8fc8078fdda97530eeb02424b6eea/sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd", size = 94021 }, +] + +[[package]] +name = "sqlalchemy" +version = "1.4.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(platform_machine == 'AMD64' and python_full_version >= '3.11') or (platform_machine == 'WIN32' and python_full_version >= '3.11') or (platform_machine == 'aarch64' and python_full_version >= '3.11') or (platform_machine == 'amd64' and python_full_version >= '3.11') or (platform_machine == 'ppc64le' and python_full_version >= '3.11') or (platform_machine == 'win32' and python_full_version >= '3.11') or (platform_machine == 'x86_64' and python_full_version >= '3.11')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/a4/b5991829c34af0505e0f2b1ccf9588d1ba90f2d984ee208c90c985f1265a/SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296", size = 8514200 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ab/50/d5756b1faa3c727bc3e2601ee1975c00e6adbafbbc436bea1e87af86328d/SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d", size = 1588505 }, + { url = "https://files.pythonhosted.org/packages/81/05/73cb4865011f85fc3c4af8af06dc66b50527208f83c90b807071abba8da1/SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0", size = 1628161 }, + { url = "https://files.pythonhosted.org/packages/a9/5b/3afbd03f813b7ba929887d0d1107b54e7bad4e3a10664ab21a05eb777149/SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb", size = 1626252 }, + { url = "https://files.pythonhosted.org/packages/78/5b/9eda3191ff1b9e101addc4f67df6c4a2836569f36f2e0117abe362e65b33/SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e", size = 1589454 }, + { url = "https://files.pythonhosted.org/packages/a3/65/bede5ab82a258c4a55a5cbfacbfc74e3ca2b82e61085a815f919fa1660be/SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12", size = 1591519 }, + { url = "https://files.pythonhosted.org/packages/fc/30/7e04f16d0508d4e57edd5c8def5810bb31bc73203beacd8bf83ed18ff0f1/SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c", size = 1589216 }, + { url = "https://files.pythonhosted.org/packages/ce/e6/9da1e081321a514c0147a2e0b293f27ca93f0f299cbd5ba746a9422a9f07/SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554", size = 1628827 }, + { url = "https://files.pythonhosted.org/packages/10/c1/1613a8dcd05e6dacc9505554ce6c217a1cfda0da9c7592e258856945c6b6/SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b", size = 1627867 }, + { url = "https://files.pythonhosted.org/packages/0e/a7/97e7893673165b41dacfb07476df83a2fb5c9445feea5e54ad6ed3d27cb5/SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126", size = 1589871 }, + { url = "https://files.pythonhosted.org/packages/49/62/d0e4502e27eaa10da35243d5241c3be3ed3974d607281e3b4ccc065d9853/SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64", size = 1591783 }, +] + +[[package]] +name = "testfixtures" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/25/d7e9d05f87e2ab84657a0dfb1f24fc295d542ac2eb221531d976ea4aa1ff/testfixtures-8.3.0.tar.gz", hash = "sha256:d4c0b84af2f267610f908009b50d6f983a4e58ade22c67bab6787b5a402d59c0", size = 137420 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/4e/699671ba484b94bda0959b281ff59b24f728263befd13e060fa038ce3bc8/testfixtures-8.3.0-py3-none-any.whl", hash = "sha256:3d1e0e0005c4d6ac2a2ab27916704c6471047f0d2f78f2e54adf20abdacc7b10", size = 105085 }, +] + +[[package]] +name = "tomli" +version = "2.0.2" +source = 
{ registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/b9/de2a5c0144d7d75a57ff355c0c24054f965b2dc3036456ae03a51ea6264b/tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed", size = 16096 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/db/ce8eda256fa131af12e0a76d481711abe4681b6923c27efb9a255c9e4594/tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", size = 13237 }, +] + +[[package]] +name = "tox" +version = "4.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "chardet" }, + { name = "colorama" }, + { name = "filelock" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "pluggy" }, + { name = "pyproject-api" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/4a/55f9dba99aad874ae54a7fb2310c940e978fd0155eb3576ddebec000fca7/tox-4.20.0.tar.gz", hash = "sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5", size = 181364 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/ee/6f9bf37f197578f98fb450f1aeebf4570f85b24b00d846bbde6e11489bd1/tox-4.20.0-py3-none-any.whl", hash = "sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34", size = 157087 }, +] + +[[package]] +name = "typeguard" +version = "2.13.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/38/c61bfcf62a7b572b5e9363a802ff92559cb427ee963048e1442e3aef7490/typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", size = 40604 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/bb/d43e5c75054e53efce310e79d63df0ac3f25e34c926be5dffb7d283fb2a8/typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1", size = 17605 }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240917" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/7d/a95df0a11f95c8f48d7683f03e4aed1a2c0fc73e9de15cca4d38034bea1a/types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587", size = 12381 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/2c/c1d81d680997d24b0542aa336f0a65bd7835e5224b7670f33a7d617da379/types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570", size = 15264 }, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926 }, +] + +[[package]] +name = "unicodecsv" +version = "0.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6f/a4/691ab63b17505a26096608cc309960b5a6bdf39e4ba1a793d5f9b1a53270/unicodecsv-0.14.1.tar.gz", hash = "sha256:018c08037d48649a0412063ff4eda26eaa81eff1546dbffa51fa5293276ff7fc", size = 10267 } + +[[package]] +name = "uritemplate" +version = "4.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/5a/4742fdba39cd02a56226815abfa72fe0aa81c33bed16ed045647d6000eba/uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0", size = 273898 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c0/7461b49cd25aeece13766f02ee576d1db528f1c37ce69aee300e075b485b/uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e", size = 10356 }, +] + +[[package]] +name = "urllib3" +version = "1.26.18" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/39/64487bf07df2ed854cc06078c27c0d0abc59bd27b32232876e403c333a08/urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0", size = 305687 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/53/aa91e163dcfd1e5b82d8a890ecf13314e3e149c05270cc644581f77f17fd/urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", size = 143835 }, +] + +[[package]] +name = "vcrpy" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "urllib3", marker = "python_full_version >= '3.11'" }, + { name = "wrapt" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/16/4e/fff59599826793f9e3460c22c0af0377abb27dc9781a7d5daca8cb03da25/vcrpy-6.0.2.tar.gz", hash = "sha256:88e13d9111846745898411dbc74a75ce85870af96dd320d75f1ee33158addc09", size = 85472 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/ed/25d19705791d3fccc84423d564695421a75b4e08e8ab15a004a49068742d/vcrpy-6.0.2-py2.py3-none-any.whl", hash = "sha256:40370223861181bc76a5e5d4b743a95058bb1ad516c3c08570316ab592f56cad", size = 42431 }, +] + +[[package]] +name = "virtualenv" +version = "20.27.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8c/b3/7b6a79c5c8cf6d90ea681310e169cf2db2884f4d583d16c6e1d5a75a4e04/virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba", size = 6491145 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/92/78324ff89391e00c8f4cf6b8526c41c6ef36b4ea2d2c132250b1a6fc2b8d/virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4", size = 3117838 }, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/4c/063a912e20bcef7124e0df97282a8af3ff3e4b603ce84c481d6d7346be0a/wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", size = 53972 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/03/c188ac517f402775b90d6f312955a5e53b866c964b32119f2ed76315697e/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", size = 37313 }, + { url = "https://files.pythonhosted.org/packages/0f/16/ea627d7817394db04518f62934a5de59874b587b792300991b3c347ff5e0/wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", size = 38164 }, + { url = "https://files.pythonhosted.org/packages/7f/a7/f1212ba098f3de0fd244e2de0f8791ad2539c03bef6c05a9fcb03e45b089/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", size = 80890 }, + { url = "https://files.pythonhosted.org/packages/b7/96/bb5e08b3d6db003c9ab219c487714c13a237ee7dcc572a555eaf1ce7dc82/wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", size = 73118 }, + { url = "https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", size = 80746 }, + { url = "https://files.pythonhosted.org/packages/11/fb/18ec40265ab81c0e82a934de04596b6ce972c27ba2592c8b53d5585e6bcd/wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", size = 85668 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/0ecb1fa23145560431b970418dce575cfaec555ab08617d82eb92afc7ccf/wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", size = 78556 }, + { url = "https://files.pythonhosted.org/packages/25/62/cd284b2b747f175b5a96cbd8092b32e7369edab0644c45784871528eb852/wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", size = 85712 }, + { url = "https://files.pythonhosted.org/packages/e5/a7/47b7ff74fbadf81b696872d5ba504966591a3468f1bc86bca2f407baef68/wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", size = 35327 }, + { url = "https://files.pythonhosted.org/packages/cf/c3/0084351951d9579ae83a3d9e38c140371e4c6b038136909235079f2e6e78/wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", size = 37523 }, + { url = "https://files.pythonhosted.org/packages/92/17/224132494c1e23521868cdd57cd1e903f3b6a7ba6996b7b8f077ff8ac7fe/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", size = 37614 }, + { url = "https://files.pythonhosted.org/packages/6a/d7/cfcd73e8f4858079ac59d9db1ec5a1349bc486ae8e9ba55698cc1f4a1dff/wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", size = 38316 }, + { url = "https://files.pythonhosted.org/packages/7e/79/5ff0a5c54bda5aec75b36453d06be4f83d5cd4932cc84b7cb2b52cee23e2/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", size = 86322 }, + { url = 
"https://files.pythonhosted.org/packages/c4/81/e799bf5d419f422d8712108837c1d9bf6ebe3cb2a81ad94413449543a923/wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", size = 79055 }, + { url = "https://files.pythonhosted.org/packages/62/62/30ca2405de6a20448ee557ab2cd61ab9c5900be7cbd18a2639db595f0b98/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", size = 87291 }, + { url = "https://files.pythonhosted.org/packages/49/4e/5d2f6d7b57fc9956bf06e944eb00463551f7d52fc73ca35cfc4c2cdb7aed/wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", size = 90374 }, + { url = "https://files.pythonhosted.org/packages/a6/9b/c2c21b44ff5b9bf14a83252a8b973fb84923764ff63db3e6dfc3895cf2e0/wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", size = 83896 }, + { url = "https://files.pythonhosted.org/packages/14/26/93a9fa02c6f257df54d7570dfe8011995138118d11939a4ecd82cb849613/wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", size = 91738 }, + { url = "https://files.pythonhosted.org/packages/a2/5b/4660897233eb2c8c4de3dc7cefed114c61bacb3c28327e64150dc44ee2f6/wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", size = 35568 }, + { url = "https://files.pythonhosted.org/packages/5c/cc/8297f9658506b224aa4bd71906447dea6bb0ba629861a758c28f67428b91/wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", size = 37653 }, + { url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362 }, +] + +[[package]] +name = "xmltodict" +version = "0.14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/50/05/51dcca9a9bf5e1bce52582683ce50980bcadbc4fa5143b9f2b19ab99958f/xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553", size = 51942 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/45/fc303eb433e8a2a271739c98e953728422fa61a3c1f36077a49e395c972e/xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac", size = 9981 }, +] + +[[package]] +name = "yarl" +version = "1.15.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/e1/d5427a061819c9f885f58bb0467d02a523f1aec19f9e5f9c82ce950d90d3/yarl-1.15.2.tar.gz", hash = "sha256:a39c36f4218a5bb668b4f06874d676d35a035ee668e6e7e3538835c703634b84", size = 169318 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/59/3ae125c97a2a8571ea16fdf59fcbd288bc169e0005d1af9946a90ea831d9/yarl-1.15.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9fcda20b2de7042cc35cf911702fa3d8311bd40055a14446c1e62403684afdc5", size = 136492 }, + { 
url = "https://files.pythonhosted.org/packages/f9/2b/efa58f36b582db45b94c15e87803b775eb8a4ca0db558121a272e67f3564/yarl-1.15.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0545de8c688fbbf3088f9e8b801157923be4bf8e7b03e97c2ecd4dfa39e48e0e", size = 88614 }, + { url = "https://files.pythonhosted.org/packages/82/69/eb73c0453a2ff53194df485dc7427d54e6cb8d1180fcef53251a8e24d069/yarl-1.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbda058a9a68bec347962595f50546a8a4a34fd7b0654a7b9697917dc2bf810d", size = 86607 }, + { url = "https://files.pythonhosted.org/packages/48/4e/89beaee3a4da0d1c6af1176d738cff415ff2ad3737785ee25382409fe3e3/yarl-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ac2bc069f4a458634c26b101c2341b18da85cb96afe0015990507efec2e417", size = 334077 }, + { url = "https://files.pythonhosted.org/packages/da/e8/8fcaa7552093f94c3f327783e2171da0eaa71db0c267510898a575066b0f/yarl-1.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd126498171f752dd85737ab1544329a4520c53eed3997f9b08aefbafb1cc53b", size = 347365 }, + { url = "https://files.pythonhosted.org/packages/be/fa/dc2002f82a89feab13a783d3e6b915a3a2e0e83314d9e3f6d845ee31bfcc/yarl-1.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3db817b4e95eb05c362e3b45dafe7144b18603e1211f4a5b36eb9522ecc62bcf", size = 344823 }, + { url = "https://files.pythonhosted.org/packages/ae/c8/c4a00fe7f2aa6970c2651df332a14c88f8baaedb2e32d6c3b8c8a003ea74/yarl-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:076b1ed2ac819933895b1a000904f62d615fe4533a5cf3e052ff9a1da560575c", size = 337132 }, + { url = "https://files.pythonhosted.org/packages/07/bf/84125f85f44bf2af03f3cf64e87214b42cd59dcc8a04960d610a9825f4d4/yarl-1.15.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8cfd847e6b9ecf9f2f2531c8427035f291ec286c0a4944b0a9fce58c6446046", size = 326258 }, + { url = "https://files.pythonhosted.org/packages/00/19/73ad8122b2fa73fe22e32c24b82a6c053cf6c73e2f649b73f7ef97bee8d0/yarl-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:32b66be100ac5739065496c74c4b7f3015cef792c3174982809274d7e51b3e04", size = 336212 }, + { url = "https://files.pythonhosted.org/packages/39/1d/2fa4337d11f6587e9b7565f84eba549f2921494bc8b10bfe811079acaa70/yarl-1.15.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:34a2d76a1984cac04ff8b1bfc939ec9dc0914821264d4a9c8fd0ed6aa8d4cfd2", size = 330397 }, + { url = "https://files.pythonhosted.org/packages/39/ab/dce75e06806bcb4305966471ead03ce639d8230f4f52c32bd614d820c044/yarl-1.15.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0afad2cd484908f472c8fe2e8ef499facee54a0a6978be0e0cff67b1254fd747", size = 334985 }, + { url = "https://files.pythonhosted.org/packages/c1/98/3f679149347a5e34c952bf8f71a387bc96b3488fae81399a49f8b1a01134/yarl-1.15.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c68e820879ff39992c7f148113b46efcd6ec765a4865581f2902b3c43a5f4bbb", size = 356033 }, + { url = "https://files.pythonhosted.org/packages/f7/8c/96546061c19852d0a4b1b07084a58c2e8911db6bcf7838972cff542e09fb/yarl-1.15.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:98f68df80ec6ca3015186b2677c208c096d646ef37bbf8b49764ab4a38183931", size = 357710 }, + { url = "https://files.pythonhosted.org/packages/01/45/ade6fb3daf689816ebaddb3175c962731edf300425c3254c559b6d0dcc27/yarl-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:3c56ec1eacd0a5d35b8a29f468659c47f4fe61b2cab948ca756c39b7617f0aa5", size = 345532 }, + { url = "https://files.pythonhosted.org/packages/e7/d7/8de800d3aecda0e64c43e8fc844f7effc8731a6099fa0c055738a2247504/yarl-1.15.2-cp311-cp311-win32.whl", hash = "sha256:eedc3f247ee7b3808ea07205f3e7d7879bc19ad3e6222195cd5fbf9988853e4d", size = 78250 }, + { url = "https://files.pythonhosted.org/packages/3a/6c/69058bbcfb0164f221aa30e0cd1a250f6babb01221e27c95058c51c498ca/yarl-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:0ccaa1bc98751fbfcf53dc8dfdb90d96e98838010fc254180dd6707a6e8bb179", size = 84492 }, + { url = "https://files.pythonhosted.org/packages/e0/d1/17ff90e7e5b1a0b4ddad847f9ec6a214b87905e3a59d01bff9207ce2253b/yarl-1.15.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82d5161e8cb8f36ec778fd7ac4d740415d84030f5b9ef8fe4da54784a1f46c94", size = 136721 }, + { url = "https://files.pythonhosted.org/packages/44/50/a64ca0577aeb9507f4b672f9c833d46cf8f1e042ce2e80c11753b936457d/yarl-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fa2bea05ff0a8fb4d8124498e00e02398f06d23cdadd0fe027d84a3f7afde31e", size = 88954 }, + { url = "https://files.pythonhosted.org/packages/c9/0a/a30d0b02046d4088c1fd32d85d025bd70ceb55f441213dee14d503694f41/yarl-1.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99e12d2bf587b44deb74e0d6170fec37adb489964dbca656ec41a7cd8f2ff178", size = 86692 }, + { url = "https://files.pythonhosted.org/packages/06/0b/7613decb8baa26cba840d7ea2074bd3c5e27684cbcb6d06e7840d6c5226c/yarl-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:243fbbbf003754fe41b5bdf10ce1e7f80bcc70732b5b54222c124d6b4c2ab31c", size = 325762 }, + { url = "https://files.pythonhosted.org/packages/97/f5/b8c389a58d1eb08f89341fc1bbcc23a0341f7372185a0a0704dbdadba53a/yarl-1.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:856b7f1a7b98a8c31823285786bd566cf06226ac4f38b3ef462f593c608a9bd6", size = 335037 }, + { url = "https://files.pythonhosted.org/packages/cb/f9/d89b93a7bb8b66e01bf722dcc6fec15e11946e649e71414fd532b05c4d5d/yarl-1.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:553dad9af802a9ad1a6525e7528152a015b85fb8dbf764ebfc755c695f488367", size = 334221 }, + { url = "https://files.pythonhosted.org/packages/10/77/1db077601998e0831a540a690dcb0f450c31f64c492e993e2eaadfbc7d31/yarl-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30c3ff305f6e06650a761c4393666f77384f1cc6c5c0251965d6bfa5fbc88f7f", size = 330167 }, + { url = "https://files.pythonhosted.org/packages/3b/c2/e5b7121662fd758656784fffcff2e411c593ec46dc9ec68e0859a2ffaee3/yarl-1.15.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:353665775be69bbfc6d54c8d134bfc533e332149faeddd631b0bc79df0897f46", size = 317472 }, + { url = "https://files.pythonhosted.org/packages/c6/f3/41e366c17e50782651b192ba06a71d53500cc351547816bf1928fb043c4f/yarl-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f4fe99ce44128c71233d0d72152db31ca119711dfc5f2c82385ad611d8d7f897", size = 330896 }, + { url = "https://files.pythonhosted.org/packages/79/a2/d72e501bc1e33e68a5a31f584fe4556ab71a50a27bfd607d023f097cc9bb/yarl-1.15.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9c1e3ff4b89cdd2e1a24c214f141e848b9e0451f08d7d4963cb4108d4d798f1f", size = 328787 }, + { url = 
"https://files.pythonhosted.org/packages/9d/ba/890f7e1ea17f3c247748548eee876528ceb939e44566fa7d53baee57e5aa/yarl-1.15.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:711bdfae4e699a6d4f371137cbe9e740dc958530cb920eb6f43ff9551e17cfbc", size = 332631 }, + { url = "https://files.pythonhosted.org/packages/48/c7/27b34206fd5dfe76b2caa08bf22f9212b2d665d5bb2df8a6dd3af498dcf4/yarl-1.15.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4388c72174868884f76affcdd3656544c426407e0043c89b684d22fb265e04a5", size = 344023 }, + { url = "https://files.pythonhosted.org/packages/88/e7/730b130f4f02bd8b00479baf9a57fdea1dc927436ed1d6ba08fa5c36c68e/yarl-1.15.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f0e1844ad47c7bd5d6fa784f1d4accc5f4168b48999303a868fe0f8597bde715", size = 352290 }, + { url = "https://files.pythonhosted.org/packages/84/9b/e8dda28f91a0af67098cddd455e6b540d3f682dda4c0de224215a57dee4a/yarl-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a5cafb02cf097a82d74403f7e0b6b9df3ffbfe8edf9415ea816314711764a27b", size = 343742 }, + { url = "https://files.pythonhosted.org/packages/66/47/b1c6bb85f2b66decbe189e27fcc956ab74670a068655df30ef9a2e15c379/yarl-1.15.2-cp312-cp312-win32.whl", hash = "sha256:156ececdf636143f508770bf8a3a0498de64da5abd890c7dbb42ca9e3b6c05b8", size = 78051 }, + { url = "https://files.pythonhosted.org/packages/7d/9e/1a897e5248ec53e96e9f15b3e6928efd5e75d322c6cf666f55c1c063e5c9/yarl-1.15.2-cp312-cp312-win_amd64.whl", hash = "sha256:435aca062444a7f0c884861d2e3ea79883bd1cd19d0a381928b69ae1b85bc51d", size = 84313 }, + { url = "https://files.pythonhosted.org/packages/46/ab/be3229898d7eb1149e6ba7fe44f873cf054d275a00b326f2a858c9ff7175/yarl-1.15.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:416f2e3beaeae81e2f7a45dc711258be5bdc79c940a9a270b266c0bec038fb84", size = 135006 }, + { url = "https://files.pythonhosted.org/packages/10/10/b91c186b1b0e63951f80481b3e6879bb9f7179d471fe7c4440c9e900e2a3/yarl-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:173563f3696124372831007e3d4b9821746964a95968628f7075d9231ac6bb33", size = 88121 }, + { url = "https://files.pythonhosted.org/packages/bf/1d/4ceaccf836b9591abfde775e84249b847ac4c6c14ee2dd8d15b5b3cede44/yarl-1.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ce2e0f6123a60bd1a7f5ae3b2c49b240c12c132847f17aa990b841a417598a2", size = 85967 }, + { url = "https://files.pythonhosted.org/packages/93/bd/c924f22bdb2c5d0ca03a9e64ecc5e041aace138c2a91afff7e2f01edc3a1/yarl-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaea112aed589131f73d50d570a6864728bd7c0c66ef6c9154ed7b59f24da611", size = 325615 }, + { url = "https://files.pythonhosted.org/packages/59/a5/6226accd5c01cafd57af0d249c7cf9dd12569cd9c78fbd93e8198e7a9d84/yarl-1.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4ca3b9f370f218cc2a0309542cab8d0acdfd66667e7c37d04d617012485f904", size = 334945 }, + { url = "https://files.pythonhosted.org/packages/4c/c1/cc6ccdd2bcd0ff7291602d5831754595260f8d2754642dfd34fef1791059/yarl-1.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23ec1d3c31882b2a8a69c801ef58ebf7bae2553211ebbddf04235be275a38548", size = 336701 }, + { url = "https://files.pythonhosted.org/packages/ef/ff/39a767ee249444e4b26ea998a526838238f8994c8f274befc1f94dacfb43/yarl-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75119badf45f7183e10e348edff5a76a94dc19ba9287d94001ff05e81475967b", size = 330977 }, + { url = 
"https://files.pythonhosted.org/packages/dd/ba/b1fed73f9d39e3e7be8f6786be5a2ab4399c21504c9168c3cadf6e441c2e/yarl-1.15.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e6fdc976ec966b99e4daa3812fac0274cc28cd2b24b0d92462e2e5ef90d368", size = 317402 }, + { url = "https://files.pythonhosted.org/packages/82/e8/03e3ebb7f558374f29c04868b20ca484d7997f80a0a191490790a8c28058/yarl-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8657d3f37f781d987037f9cc20bbc8b40425fa14380c87da0cb8dfce7c92d0fb", size = 331776 }, + { url = "https://files.pythonhosted.org/packages/1f/83/90b0f4fd1ecf2602ba4ac50ad0bbc463122208f52dd13f152bbc0d8417dd/yarl-1.15.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:93bed8a8084544c6efe8856c362af08a23e959340c87a95687fdbe9c9f280c8b", size = 331585 }, + { url = "https://files.pythonhosted.org/packages/c7/f6/1ed7e7f270ae5f9f1174c1f8597b29658f552fee101c26de8b2eb4ca147a/yarl-1.15.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:69d5856d526802cbda768d3e6246cd0d77450fa2a4bc2ea0ea14f0d972c2894b", size = 336395 }, + { url = "https://files.pythonhosted.org/packages/e0/3a/4354ed8812909d9ec54a92716a53259b09e6b664209231f2ec5e75f4820d/yarl-1.15.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ccad2800dfdff34392448c4bf834be124f10a5bc102f254521d931c1c53c455a", size = 342810 }, + { url = "https://files.pythonhosted.org/packages/de/cc/39e55e16b1415a87f6d300064965d6cfb2ac8571e11339ccb7dada2444d9/yarl-1.15.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a880372e2e5dbb9258a4e8ff43f13888039abb9dd6d515f28611c54361bc5644", size = 351441 }, + { url = "https://files.pythonhosted.org/packages/fb/19/5cd4757079dc9d9f3de3e3831719b695f709a8ce029e70b33350c9d082a7/yarl-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c998d0558805860503bc3a595994895ca0f7835e00668dadc673bbf7f5fbfcbe", size = 345875 }, + { url = "https://files.pythonhosted.org/packages/83/a0/ef09b54634f73417f1ea4a746456a4372c1b044f07b26e16fa241bd2d94e/yarl-1.15.2-cp313-cp313-win32.whl", hash = "sha256:533a28754e7f7439f217550a497bb026c54072dbe16402b183fdbca2431935a9", size = 302609 }, + { url = "https://files.pythonhosted.org/packages/20/9f/f39c37c17929d3975da84c737b96b606b68c495cc4ee86408f10523a1635/yarl-1.15.2-cp313-cp313-win_amd64.whl", hash = "sha256:5838f2b79dc8f96fdc44077c9e4e2e33d7089b10788464609df788eb97d03aad", size = 308252 }, + { url = "https://files.pythonhosted.org/packages/46/cf/a28c494decc9c8776b0d7b729c68d26fdafefcedd8d2eab5d9cd767376b2/yarl-1.15.2-py3-none-any.whl", hash = "sha256:0d3105efab7c5c091609abacad33afff33bdff0035bece164c98bcf5a85ef90a", size = 38891 }, +] + +[[package]] +name = "zipp" +version = "3.20.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 }, +] From 45084a48eb7d3d997c643eddac2b74da1e6b9bec Mon Sep 17 00:00:00 2001 From: James Estevez Date: Wed, 13 Nov 2024 13:42:59 -0800 Subject: [PATCH 34/65] Update to snowfakery v4 --- uv.lock | 839 +++++++++++++++++++++++++++++--------------------------- 1 file changed, 436 
insertions(+), 403 deletions(-) diff --git a/uv.lock b/uv.lock index 3e3ba17d5b..ddb238bec1 100644 --- a/uv.lock +++ b/uv.lock @@ -7,11 +7,11 @@ resolution-markers = [ [[package]] name = "alabaster" -version = "0.7.13" +version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/71/a8ee96d1fd95ca04a0d2e2d9c4081dac4c2d2b12f7ddb899c8cb9bfd1532/alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2", size = 11454 } +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210 } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/88/c7083fc61120ab661c5d0b82cb77079fc1429d3f913a456c1c82cf4658f7/alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3", size = 13857 }, + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 }, ] [[package]] @@ -67,7 +67,7 @@ wheels = [ [[package]] name = "black" -version = "24.8.0" +version = "24.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -76,17 +76,21 @@ dependencies = [ { name = "pathspec" }, { name = "platformdirs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/b0/46fb0d4e00372f4a86a6f8efa3cb193c9f64863615e39010b1477e010578/black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f", size = 644810 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/a6/0a3aa89de9c283556146dc6dbda20cd63a9c94160a6fbdebaf0918e4a3e1/black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1", size = 1615080 }, - { url = "https://files.pythonhosted.org/packages/db/94/b803d810e14588bb297e565821a947c108390a079e21dbdcb9ab6956cd7a/black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af", size = 1438143 }, - { url = "https://files.pythonhosted.org/packages/a5/b5/f485e1bbe31f768e2e5210f52ea3f432256201289fd1a3c0afda693776b0/black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4", size = 1738774 }, - { url = "https://files.pythonhosted.org/packages/a8/69/a000fc3736f89d1bdc7f4a879f8aaf516fb03613bb51a0154070383d95d9/black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af", size = 1427503 }, - { url = "https://files.pythonhosted.org/packages/a2/a8/05fb14195cfef32b7c8d4585a44b7499c2a4b205e1662c427b941ed87054/black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368", size = 1646132 }, - { url = 
"https://files.pythonhosted.org/packages/41/77/8d9ce42673e5cb9988f6df73c1c5c1d4e9e788053cccd7f5fb14ef100982/black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed", size = 1448665 }, - { url = "https://files.pythonhosted.org/packages/cc/94/eff1ddad2ce1d3cc26c162b3693043c6b6b575f538f602f26fe846dfdc75/black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018", size = 1762458 }, - { url = "https://files.pythonhosted.org/packages/28/ea/18b8d86a9ca19a6942e4e16759b2fa5fc02bbc0eb33c1b866fcd387640ab/black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2", size = 1436109 }, - { url = "https://files.pythonhosted.org/packages/27/1e/83fa8a787180e1632c3d831f7e58994d7aaf23a0961320d21e84f922f919/black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed", size = 206504 }, + { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, + { url = "https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, + { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, + { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, + { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, + { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, + { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, + { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, + { url = "https://files.pythonhosted.org/packages/d0/a0/a993f58d4ecfba035e61fca4e9f64a2ecae838fc9f33ab798c62173ed75c/black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981", size = 1643986 }, + { url = "https://files.pythonhosted.org/packages/37/d5/602d0ef5dfcace3fb4f79c436762f130abd9ee8d950fa2abdbf8bbc555e0/black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b", size = 1448085 }, + { url = "https://files.pythonhosted.org/packages/47/6d/a3a239e938960df1a662b93d6230d4f3e9b4a22982d060fc38c42f45a56b/black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2", size = 1760928 }, + { url = "https://files.pythonhosted.org/packages/dd/cf/af018e13b0eddfb434df4d9cd1b2b7892bab119f7a20123e93f6910982e8/black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b", size = 1436875 }, + { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, ] [[package]] @@ -100,11 +104,11 @@ wheels = [ [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.8.30" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/da/e94e26401b62acd6d91df2b52954aceb7f561743aa5ccc32152886c76c96/certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", size = 164886 } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/06/a07f096c664aeb9f01624f858c3add0a4e913d6c96257acb4fce61e7de14/certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1", size = 163774 }, + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, ] [[package]] @@ -172,41 +176,56 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/63/09/c1bc53dab74b1816a00d8d030de5bf98f724c52c1635e07681d312f20be8/charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", size = 104809 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/77/02839016f6fbbf808e8b38601df6e0e66c17bbab76dff4613f7511413597/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", size = 191647 }, - { url = "https://files.pythonhosted.org/packages/3e/33/21a875a61057165e92227466e54ee076b73af1e21fe1b31f1e292251aa1e/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", size = 121434 }, - { url = "https://files.pythonhosted.org/packages/dd/51/68b61b90b24ca35495956b718f35a9756ef7d3dd4b3c1508056fa98d1a1b/charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", size = 118979 }, - { url = "https://files.pythonhosted.org/packages/e4/a6/7ee57823d46331ddc37dd00749c95b0edec2c79b15fc0d6e6efb532e89ac/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", size = 136582 }, - { url = "https://files.pythonhosted.org/packages/74/f1/0d9fe69ac441467b737ba7f48c68241487df2f4522dd7246d9426e7c690e/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", size = 146645 }, - { url = "https://files.pythonhosted.org/packages/05/31/e1f51c76db7be1d4aef220d29fbfa5dbb4a99165d9833dcbf166753b6dc0/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", size = 139398 }, - { url = "https://files.pythonhosted.org/packages/40/26/f35951c45070edc957ba40a5b1db3cf60a9dbb1b350c2d5bef03e01e61de/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", size = 140273 }, - { url = "https://files.pythonhosted.org/packages/07/07/7e554f2bbce3295e191f7e653ff15d55309a9ca40d0362fcdab36f01063c/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", size = 142577 }, - { url = "https://files.pythonhosted.org/packages/d8/b5/eb705c313100defa57da79277d9207dc8d8e45931035862fa64b625bfead/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", size = 137747 }, - { url = "https://files.pythonhosted.org/packages/19/28/573147271fd041d351b438a5665be8223f1dd92f273713cb882ddafe214c/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", size = 143375 }, - { url = "https://files.pythonhosted.org/packages/cf/7c/f3b682fa053cc21373c9a839e6beba7705857075686a05c72e0f8c4980ca/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", size = 148474 }, - { url = "https://files.pythonhosted.org/packages/1e/49/7ab74d4ac537ece3bc3334ee08645e231f39f7d6df6347b29a74b0537103/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", size = 140232 }, - { url = "https://files.pythonhosted.org/packages/2d/dc/9dacba68c9ac0ae781d40e1a0c0058e26302ea0660e574ddf6797a0347f7/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", size = 140859 }, - { url = "https://files.pythonhosted.org/packages/6c/c2/4a583f800c0708dd22096298e49f887b49d9746d0e78bfc1d7e29816614c/charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", size = 92509 }, - { url = "https://files.pythonhosted.org/packages/57/ec/80c8d48ac8b1741d5b963797b7c0c869335619e13d4744ca2f67fc11c6fc/charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", size = 99870 }, - { url = "https://files.pythonhosted.org/packages/d1/b2/fcedc8255ec42afee97f9e6f0145c734bbe104aac28300214593eb326f1d/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", size = 192892 }, - { url = "https://files.pythonhosted.org/packages/2e/7d/2259318c202f3d17f3fe6438149b3b9e706d1070fe3fcbb28049730bb25c/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", size = 122213 }, - { url = "https://files.pythonhosted.org/packages/3a/52/9f9d17c3b54dc238de384c4cb5a2ef0e27985b42a0e5cc8e8a31d918d48d/charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", size = 119404 }, - { url = "https://files.pythonhosted.org/packages/99/b0/9c365f6d79a9f0f3c379ddb40a256a67aa69c59609608fe7feb6235896e1/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", size = 137275 }, - { url = "https://files.pythonhosted.org/packages/91/33/749df346e93d7a30cdcb90cbfdd41a06026317bfbfb62cd68307c1a3c543/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", size = 147518 }, - { url = "https://files.pythonhosted.org/packages/72/1a/641d5c9f59e6af4c7b53da463d07600a695b9824e20849cb6eea8a627761/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", size = 140182 }, - { url = "https://files.pythonhosted.org/packages/ee/fb/14d30eb4956408ee3ae09ad34299131fb383c47df355ddb428a7331cfa1e/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", size = 141869 }, - { url = "https://files.pythonhosted.org/packages/df/3e/a06b18788ca2eb6695c9b22325b6fde7dde0f1d1838b1792a0076f58fe9d/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", size = 144042 }, - { url = "https://files.pythonhosted.org/packages/45/59/3d27019d3b447a88fe7e7d004a1e04be220227760264cc41b405e863891b/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", size = 138275 }, - { url = "https://files.pythonhosted.org/packages/7b/ef/5eb105530b4da8ae37d506ccfa25057961b7b63d581def6f99165ea89c7e/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", size = 144819 }, - { url = "https://files.pythonhosted.org/packages/a2/51/e5023f937d7f307c948ed3e5c29c4b7a3e42ed2ee0b8cdf8f3a706089bf0/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", size = 149415 }, - { url = "https://files.pythonhosted.org/packages/24/9d/2e3ef673dfd5be0154b20363c5cdcc5606f35666544381bee15af3778239/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", size = 141212 }, - { url = "https://files.pythonhosted.org/packages/5b/ae/ce2c12fcac59cb3860b2e2d76dc405253a4475436b1861d95fe75bdea520/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", size = 142167 }, - { url = "https://files.pythonhosted.org/packages/ed/3a/a448bf035dce5da359daf9ae8a16b8a39623cc395a2ffb1620aa1bce62b0/charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", size = 93041 }, - { url = "https://files.pythonhosted.org/packages/b6/7c/8debebb4f90174074b827c63242c23851bdf00a532489fba57fef3416e40/charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", size = 100397 }, - { url = "https://files.pythonhosted.org/packages/28/76/e6222113b83e3622caa4bb41032d0b1bf785250607392e1b778aca0b8a7d/charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", size = 48543 }, +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 }, + { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 }, + { url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 }, + { url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 }, + { url = "https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 }, + { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 
142585 }, + { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 }, + { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 }, + { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 }, + { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 }, + { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 }, + { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 }, + { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 }, + { url = "https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 }, + { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, + { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, + { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, + { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, + { url = 
"https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, + { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, + { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, + { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, + { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, + { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, + { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, + { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, + { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, + { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, + { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = 
"https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = 
"https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, ] [[package]] @@ -232,50 +251,50 @@ wheels = [ [[package]] name = "coverage" -version = "7.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796 }, - { url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244 }, - { url = "https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279 }, - { url = "https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859 }, - { url = "https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549 }, - { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477 }, - { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134 }, - { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910 }, - { url = 
"https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 209348 }, - { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230 }, - { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983 }, - { url = "https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221 }, - { url = "https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342 }, - { url = "https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371 }, - { url = "https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455 }, - { url = "https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924 }, - { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252 }, - { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897 }, - { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606 }, - { url = "https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373 }, - { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007 }, - { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269 }, - { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886 }, - { url = "https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037 }, - { url = "https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 239038 }, - { url = "https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690 }, - { url = "https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765 }, - { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611 }, - { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671 }, - { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368 }, - { url = "https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758 }, - { url = "https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035 }, - { url = "https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839 }, - { url = 
"https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569 }, - { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927 }, - { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401 }, - { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301 }, - { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598 }, - { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307 }, - { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453 }, +version = "7.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/12/3669b6382792783e92046730ad3327f53b2726f0603f4c311c4da4824222/coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73", size = 798716 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/31/9c0cf84f0dfcbe4215b7eb95c31777cdc0483c13390e69584c8150c85175/coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b", size = 206819 }, + { url = "https://files.pythonhosted.org/packages/53/ed/a38401079ad320ad6e054a01ec2b61d270511aeb3c201c80e99c841229d5/coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25", size = 207263 }, + { url = "https://files.pythonhosted.org/packages/20/e7/c3ad33b179ab4213f0d70da25a9c214d52464efa11caeab438592eb1d837/coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546", size = 239205 }, + { url = "https://files.pythonhosted.org/packages/36/91/fc02e8d8e694f557752120487fd982f654ba1421bbaa5560debf96ddceda/coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b", size = 236612 }, + { url = 
"https://files.pythonhosted.org/packages/cc/57/cb08f0eda0389a9a8aaa4fc1f9fec7ac361c3e2d68efd5890d7042c18aa3/coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e", size = 238479 }, + { url = "https://files.pythonhosted.org/packages/d5/c9/2c7681a9b3ca6e6f43d489c2e6653a53278ed857fd6e7010490c307b0a47/coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718", size = 237405 }, + { url = "https://files.pythonhosted.org/packages/b5/4e/ebfc6944b96317df8b537ae875d2e57c27b84eb98820bc0a1055f358f056/coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db", size = 236038 }, + { url = "https://files.pythonhosted.org/packages/13/f2/3a0bf1841a97c0654905e2ef531170f02c89fad2555879db8fe41a097871/coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522", size = 236812 }, + { url = "https://files.pythonhosted.org/packages/b9/9c/66bf59226b52ce6ed9541b02d33e80a6e816a832558fbdc1111a7bd3abd4/coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf", size = 209400 }, + { url = "https://files.pythonhosted.org/packages/2a/a0/b0790934c04dfc8d658d4a62acb8f7ca0efdf3818456fcad757b11c6479d/coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19", size = 210243 }, + { url = "https://files.pythonhosted.org/packages/7d/e7/9291de916d084f41adddfd4b82246e68d61d6a75747f075f7e64628998d2/coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2", size = 207013 }, + { url = "https://files.pythonhosted.org/packages/27/03/932c2c5717a7fa80cd43c6a07d3177076d97b79f12f40f882f9916db0063/coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117", size = 207251 }, + { url = "https://files.pythonhosted.org/packages/d5/3f/0af47dcb9327f65a45455fbca846fe96eb57c153af46c4754a3ba678938a/coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613", size = 240268 }, + { url = "https://files.pythonhosted.org/packages/8a/3c/37a9d81bbd4b23bc7d46ca820e16174c613579c66342faa390a271d2e18b/coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27", size = 237298 }, + { url = "https://files.pythonhosted.org/packages/c0/70/6b0627e5bd68204ee580126ed3513140b2298995c1233bd67404b4e44d0e/coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52", size = 239367 }, + { url = "https://files.pythonhosted.org/packages/3c/eb/634d7dfab24ac3b790bebaf9da0f4a5352cbc125ce6a9d5c6cf4c6cae3c7/coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2", size = 238853 }, + { url = 
"https://files.pythonhosted.org/packages/d9/0d/8e3ed00f1266ef7472a4e33458f42e39492e01a64281084fb3043553d3f1/coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1", size = 237160 }, + { url = "https://files.pythonhosted.org/packages/ce/9c/4337f468ef0ab7a2e0887a9c9da0e58e2eada6fc6cbee637a4acd5dfd8a9/coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5", size = 238824 }, + { url = "https://files.pythonhosted.org/packages/5e/09/3e94912b8dd37251377bb02727a33a67ee96b84bbbe092f132b401ca5dd9/coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17", size = 209639 }, + { url = "https://files.pythonhosted.org/packages/01/69/d4f3a4101171f32bc5b3caec8ff94c2c60f700107a6aaef7244b2c166793/coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08", size = 210428 }, + { url = "https://files.pythonhosted.org/packages/c2/4d/2dede4f7cb5a70fb0bb40a57627fddf1dbdc6b9c1db81f7c4dcdcb19e2f4/coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9", size = 207039 }, + { url = "https://files.pythonhosted.org/packages/3f/f9/d86368ae8c79e28f1fb458ebc76ae9ff3e8bd8069adc24e8f2fed03c58b7/coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba", size = 207298 }, + { url = "https://files.pythonhosted.org/packages/64/c5/b4cc3c3f64622c58fbfd4d8b9a7a8ce9d355f172f91fcabbba1f026852f6/coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c", size = 239813 }, + { url = "https://files.pythonhosted.org/packages/8a/86/14c42e60b70a79b26099e4d289ccdfefbc68624d096f4481163085aa614c/coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06", size = 236959 }, + { url = "https://files.pythonhosted.org/packages/7f/f8/4436a643631a2fbab4b44d54f515028f6099bfb1cd95b13cfbf701e7f2f2/coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f", size = 238950 }, + { url = "https://files.pythonhosted.org/packages/49/50/1571810ddd01f99a0a8be464a4ac8b147f322cd1e8e296a1528984fc560b/coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b", size = 238610 }, + { url = "https://files.pythonhosted.org/packages/f3/8c/6312d241fe7cbd1f0cade34a62fea6f333d1a261255d76b9a87074d8703c/coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21", size = 236697 }, + { url = "https://files.pythonhosted.org/packages/ce/5f/fef33dfd05d87ee9030f614c857deb6df6556b8f6a1c51bbbb41e24ee5ac/coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a", size = 238541 }, + { url = "https://files.pythonhosted.org/packages/a9/64/6a984b6e92e1ea1353b7ffa08e27f707a5e29b044622445859200f541e8c/coverage-7.6.4-cp313-cp313-win32.whl", hash = 
"sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e", size = 209707 }, + { url = "https://files.pythonhosted.org/packages/5c/60/ce5a9e942e9543783b3db5d942e0578b391c25cdd5e7f342d854ea83d6b7/coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963", size = 210439 }, + { url = "https://files.pythonhosted.org/packages/78/53/6719677e92c308207e7f10561a1b16ab8b5c00e9328efc9af7cfd6fb703e/coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f", size = 207784 }, + { url = "https://files.pythonhosted.org/packages/fa/dd/7054928930671fcb39ae6a83bb71d9ab5f0afb733172543ced4b09a115ca/coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806", size = 208058 }, + { url = "https://files.pythonhosted.org/packages/b5/7d/fd656ddc2b38301927b9eb3aae3fe827e7aa82e691923ed43721fd9423c9/coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11", size = 250772 }, + { url = "https://files.pythonhosted.org/packages/90/d0/eb9a3cc2100b83064bb086f18aedde3afffd7de6ead28f69736c00b7f302/coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3", size = 246490 }, + { url = "https://files.pythonhosted.org/packages/45/44/3f64f38f6faab8a0cfd2c6bc6eb4c6daead246b97cf5f8fc23bf3788f841/coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a", size = 248848 }, + { url = "https://files.pythonhosted.org/packages/5d/11/4c465a5f98656821e499f4b4619929bd5a34639c466021740ecdca42aa30/coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc", size = 248340 }, + { url = "https://files.pythonhosted.org/packages/f1/96/ebecda2d016cce9da812f404f720ca5df83c6b29f65dc80d2000d0078741/coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70", size = 246229 }, + { url = "https://files.pythonhosted.org/packages/16/d9/3d820c00066ae55d69e6d0eae11d6149a5ca7546de469ba9d597f01bf2d7/coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef", size = 247510 }, + { url = "https://files.pythonhosted.org/packages/8f/c3/4fa1eb412bb288ff6bfcc163c11700ff06e02c5fad8513817186e460ed43/coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e", size = 210353 }, + { url = "https://files.pythonhosted.org/packages/7e/77/03fc2979d1538884d921c2013075917fc927f41cd8526909852fe4494112/coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1", size = 211502 }, ] [package.optional-dependencies] @@ -314,7 +333,7 @@ wheels = [ [[package]] name = "cumulusci" -version = "3.93.0" +version = "4.0.0" source = { editable = "." 
} dependencies = [ { name = "click" }, @@ -407,7 +426,7 @@ requires-dist = [ { name = "sarge" }, { name = "selenium", specifier = "<4" }, { name = "simple-salesforce", specifier = "==1.11.4" }, - { name = "snowfakery", directory = "../Snowfakery" }, + { name = "snowfakery", specifier = ">=4.0.0" }, { name = "sqlalchemy", specifier = "<2" }, { name = "xmltodict" }, ] @@ -459,11 +478,11 @@ wheels = [ [[package]] name = "docutils" -version = "0.16" +version = "0.21.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc", size = 1962041 } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444 } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/44/8a15e45ffa96e6cf82956dd8d7af9e666357e16b0d93b253903475ee947f/docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af", size = 548181 }, + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408 }, ] [[package]] @@ -480,14 +499,15 @@ wheels = [ [[package]] name = "faker" -version = "24.4.0" +version = "32.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "python-dateutil" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/99/18/87b247323b6fbc006d5579aa4e783d43f29b3becf71fcb212fda3e647621/Faker-24.4.0.tar.gz", hash = "sha256:a5ddccbe97ab691fad6bd8036c31f5697cfaa550e62e000078d1935fa8a7ec2e", size = 1724351 } +sdist = { url = "https://files.pythonhosted.org/packages/1c/2a/dd2c8f55d69013d0eee30ec4c998250fb7da957f5fe860ed077b3df1725b/faker-32.1.0.tar.gz", hash = "sha256:aac536ba04e6b7beb2332c67df78485fc29c1880ff723beac6d1efd45e2f10f5", size = 1850193 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/ac/b070934c1f1b7a6deefe5f4cda2a6ba988277765065d658f2d7e2bbb79dd/Faker-24.4.0-py3-none-any.whl", hash = "sha256:998c29ee7d64429bd59204abffa9ba11f784fb26c7b9df4def78d1a70feb36a7", size = 1762024 }, + { url = "https://files.pythonhosted.org/packages/7e/fa/4a82dea32d6262a96e6841cdd4a45c11ac09eecdff018e745565410ac70e/Faker-32.1.0-py3-none-any.whl", hash = "sha256:c77522577863c264bdc9dad3a2a750ad3f7ee43ff8185072e482992288898814", size = 1889123 }, ] [[package]] @@ -553,7 +573,7 @@ wheels = [ [[package]] name = "furo" -version = "2023.3.27" +version = "2024.8.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, @@ -561,9 +581,9 @@ dependencies = [ { name = "sphinx" }, { name = "sphinx-basic-ng" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/21/233938933f1629a4933c8fce2f803cc8fd211ca563ea4337cb44920bbbfa/furo-2023.3.27.tar.gz", hash = "sha256:b99e7867a5cc833b2b34d7230631dd6558c7a29f93071fdbb5709634bb33c5a5", size = 1636618 } +sdist = { url = "https://files.pythonhosted.org/packages/a0/e2/d351d69a9a9e4badb4a5be062c2d0e87bd9e6c23b5e57337fef14bef34c8/furo-2024.8.6.tar.gz", hash = 
"sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01", size = 1661506 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/8c/fa66eb31b1b89b9208269f1fea5edcbecd52b274e5c7afadb9152fb3d4ca/furo-2023.3.27-py3-none-any.whl", hash = "sha256:4ab2be254a2d5e52792d0ca793a12c35582dd09897228a6dd47885dabd5c9521", size = 327605 }, + { url = "https://files.pythonhosted.org/packages/27/48/e791a7ed487dbb9729ef32bb5d1af16693d8925f4366befef54119b2e576/furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c", size = 341333 }, ] [[package]] @@ -583,28 +603,44 @@ wheels = [ [[package]] name = "greenlet" -version = "3.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/14/3bddb1298b9a6786539ac609ba4b7c9c0842e12aa73aaa4d8d73ec8f8185/greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491", size = 182013 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/20/68a278a6f93fa36e21cfc3d7599399a8a831225644eb3b6b18755cd3d6fc/greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61", size = 271666 }, - { url = "https://files.pythonhosted.org/packages/21/b4/90e06e07c78513ab03855768200bdb35c8e764e805b3f14fb488e56f82dc/greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559", size = 657689 }, - { url = "https://files.pythonhosted.org/packages/f6/a2/0ed21078039072f9dc738bbf3af12b103a84106b1385ac4723841f846ce7/greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e", size = 673009 }, - { url = "https://files.pythonhosted.org/packages/42/11/42ad6b1104c357826bbee7d7b9e4f24dbd9fde94899a03efb004aab62963/greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33", size = 667432 }, - { url = "https://files.pythonhosted.org/packages/bb/6b/384dee7e0121cbd1757bdc1824a5ee28e43d8d4e3f99aa59521f629442fe/greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379", size = 667442 }, - { url = "https://files.pythonhosted.org/packages/c6/1f/12d5a6cc26e8b483c2e7975f9c22e088ac735c0d8dcb8a8f72d31a4e5f04/greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22", size = 620032 }, - { url = "https://files.pythonhosted.org/packages/c7/ec/85b647e59e0f137c7792a809156f413e38379cf7f3f2e1353c37f4be4026/greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3", size = 1154218 }, - { url = "https://files.pythonhosted.org/packages/94/ed/1e5f4bca691a81700e5a88e86d6f0e538acb10188cd2cc17140e523255ef/greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d", size = 1180754 }, - { url = "https://files.pythonhosted.org/packages/47/79/26d54d7d700ef65b689fc2665a40846d13e834da0486674a8d4f0f371a47/greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728", size = 292822 
}, - { url = "https://files.pythonhosted.org/packages/a2/2f/461615adc53ba81e99471303b15ac6b2a6daa8d2a0f7f77fd15605e16d5b/greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be", size = 273085 }, - { url = "https://files.pythonhosted.org/packages/e9/55/2c3cfa3cdbb940cf7321fbcf544f0e9c74898eed43bf678abf416812d132/greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e", size = 660514 }, - { url = "https://files.pythonhosted.org/packages/38/77/efb21ab402651896c74f24a172eb4d7479f9f53898bd5e56b9e20bb24ffd/greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676", size = 674295 }, - { url = "https://files.pythonhosted.org/packages/74/3a/92f188ace0190f0066dca3636cf1b09481d0854c46e92ec5e29c7cefe5b1/greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc", size = 669395 }, - { url = "https://files.pythonhosted.org/packages/63/0f/847ed02cdfce10f0e6e3425cd054296bddb11a17ef1b34681fa01a055187/greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230", size = 670455 }, - { url = "https://files.pythonhosted.org/packages/bd/37/56b0da468a85e7704f3b2bc045015301bdf4be2184a44868c71f6dca6fe2/greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf", size = 625692 }, - { url = "https://files.pythonhosted.org/packages/7c/68/b5f4084c0a252d7e9c0d95fc1cfc845d08622037adb74e05be3a49831186/greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305", size = 1152597 }, - { url = "https://files.pythonhosted.org/packages/a4/fa/31e22345518adcd69d1d6ab5087a12c178aa7f3c51103f6d5d702199d243/greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6", size = 1181043 }, - { url = "https://files.pythonhosted.org/packages/53/80/3d94d5999b4179d91bcc93745d1b0815b073d61be79dd546b840d17adb18/greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2", size = 293635 }, +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/62/1c2665558618553c42922ed47a4e6d6527e2fa3516a8256c2f431c5d0441/greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", size = 272479 }, + { url = "https://files.pythonhosted.org/packages/76/9d/421e2d5f07285b6e4e3a676b016ca781f63cfe4a0cd8eaecf3fd6f7a71ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", size = 640404 }, + { url = 
"https://files.pythonhosted.org/packages/e5/de/6e05f5c59262a584e502dd3d261bbdd2c97ab5416cc9c0b91ea38932a901/greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", size = 652813 }, + { url = "https://files.pythonhosted.org/packages/49/93/d5f93c84241acdea15a8fd329362c2c71c79e1a507c3f142a5d67ea435ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", size = 648517 }, + { url = "https://files.pythonhosted.org/packages/15/85/72f77fc02d00470c86a5c982b8daafdf65d38aefbbe441cebff3bf7037fc/greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", size = 647831 }, + { url = "https://files.pythonhosted.org/packages/f7/4b/1c9695aa24f808e156c8f4813f685d975ca73c000c2a5056c514c64980f6/greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", size = 602413 }, + { url = "https://files.pythonhosted.org/packages/76/70/ad6e5b31ef330f03b12559d19fda2606a522d3849cde46b24f223d6d1619/greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", size = 1129619 }, + { url = "https://files.pythonhosted.org/packages/f4/fb/201e1b932e584066e0f0658b538e73c459b34d44b4bd4034f682423bc801/greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", size = 1155198 }, + { url = "https://files.pythonhosted.org/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", size = 298930 }, + { url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 }, + { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 }, + { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 }, + { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 }, + { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 }, + { url = 
"https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 }, + { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 }, + { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 }, + { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 }, + { url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 }, + { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 }, + { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 }, + { url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 }, + { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 }, + { url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 }, + { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 }, + { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 }, + { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 }, + { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 }, + { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 }, + { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 }, + { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 }, + { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 }, + { url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 }, ] [[package]] @@ -618,20 +654,20 @@ wheels = [ [[package]] name = "identify" -version = "2.6.1" +version = "2.6.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097 } +sdist = { url = "https://files.pythonhosted.org/packages/02/79/7a520fc5011e02ca3f3285b5f6820eaf80443eb73e3733f73c02fb42ba0b/identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd", size = 99113 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972 }, + { url = "https://files.pythonhosted.org/packages/e0/86/c4395700f3c5475424fb5c41e20c16be28d10c904aee4d005ba3217fc8e7/identify-2.6.2-py2.py3-none-any.whl", hash = "sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3", size = 98982 }, ] [[package]] name = "idna" -version = "3.6" +version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = 
"sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567 }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] [[package]] @@ -684,14 +720,14 @@ wheels = [ [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/5e/3a21abf3cd467d7876045335e681d276ac32492febe6d98ad89562d1a7e1/Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90", size = 268261 } +sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/6d/6de6be2d02603ab56e72997708809e8a5b0fbfee080735109b40a3564843/Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa", size = 133236 }, + { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, ] [[package]] @@ -711,14 +747,14 @@ wheels = [ [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2024.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b9/cc0cc592e7c195fb8a650c1d5990b10175cf13b4c97465c72ec841de9e4b/jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc", size = 13983 } +sdist = { url = "https://files.pythonhosted.org/packages/10/db/58f950c996c793472e336ff3655b13fbcf1e3b359dcf52dcf3ed3b52c352/jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272", size = 15561 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/07/44bd408781594c4d0a027666ef27fab1e441b109dc3b76b4f836f8fd04fe/jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c", size = 18482 }, + { url = "https://files.pythonhosted.org/packages/d1/0f/8910b19ac0670a0f80ce1008e5e751c4a57e14d2c4c13a482aa6079fa9d6/jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf", size = 18459 }, ] [[package]] @@ -797,42 +833,62 @@ wheels = [ [[package]] name = "markdown-it-py" -version = "2.2.0" +version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { 
name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/c0/59bd6d0571986f72899288a95d9d6178d0eebd70b6650f1bb3f0da90f8f7/markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1", size = 67120 } +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/25/2d88e8feee8e055d015343f9b86e370a1ccbec546f2865c98397aaef24af/markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", size = 84466 }, + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, ] [[package]] name = "markupsafe" -version = "2.1.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/87/5b/aae44c6655f3801e81aa3eef09dbbf012431987ba564d7231722f68df02d/MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", size = 19384 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/e7/291e55127bb2ae67c64d66cef01432b5933859dfb7d6949daa721b89d0b3/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", size = 18219 }, - { url = "https://files.pythonhosted.org/packages/6b/cb/aed7a284c00dfa7c0682d14df85ad4955a350a21d2e3b06d8240497359bf/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", size = 14098 }, - { url = "https://files.pythonhosted.org/packages/1c/cf/35fe557e53709e93feb65575c93927942087e9b97213eabc3fe9d5b25a55/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", size = 29014 }, - { url = "https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", size = 28220 }, - { url = "https://files.pythonhosted.org/packages/0c/40/2e73e7d532d030b1e41180807a80d564eda53babaf04d65e15c1cf897e40/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", size = 27756 }, - { url = "https://files.pythonhosted.org/packages/18/46/5dca760547e8c59c5311b332f70605d24c99d1303dd9a6e1fc3ed0d73561/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", size = 33988 }, - { url = "https://files.pythonhosted.org/packages/6d/c5/27febe918ac36397919cd4a67d5579cbbfa8da027fa1238af6285bb368ea/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", size = 32718 }, - { url = 
"https://files.pythonhosted.org/packages/f8/81/56e567126a2c2bc2684d6391332e357589a96a76cb9f8e5052d85cb0ead8/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", size = 33317 }, - { url = "https://files.pythonhosted.org/packages/00/0b/23f4b2470accb53285c613a3ab9ec19dc944eaf53592cb6d9e2af8aa24cc/MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", size = 16670 }, - { url = "https://files.pythonhosted.org/packages/b7/a2/c78a06a9ec6d04b3445a949615c4c7ed86a0b2eb68e44e7541b9d57067cc/MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", size = 17224 }, - { url = "https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", size = 18215 }, - { url = "https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", size = 14069 }, - { url = "https://files.pythonhosted.org/packages/51/b5/5d8ec796e2a08fc814a2c7d2584b55f889a55cf17dd1a90f2beb70744e5c/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", size = 29452 }, - { url = "https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", size = 28462 }, - { url = "https://files.pythonhosted.org/packages/2d/75/fd6cb2e68780f72d47e6671840ca517bda5ef663d30ada7616b0462ad1e3/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", size = 27869 }, - { url = "https://files.pythonhosted.org/packages/b0/81/147c477391c2750e8fc7705829f7351cf1cd3be64406edcf900dc633feb2/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", size = 33906 }, - { url = "https://files.pythonhosted.org/packages/8b/ff/9a52b71839d7a256b563e85d11050e307121000dcebc97df120176b3ad93/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", size = 32296 }, - { url = "https://files.pythonhosted.org/packages/88/07/2dc76aa51b481eb96a4c3198894f38b480490e834479611a4053fbf08623/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", size = 33038 }, - { url = "https://files.pythonhosted.org/packages/96/0c/620c1fb3661858c0e37eb3cbffd8c6f732a67cd97296f725789679801b31/MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", size = 16572 }, - { url = "https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", size = 17127 }, +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = 
"https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = 
"https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, ] [[package]] @@ -846,14 +902,14 @@ wheels = [ [[package]] name = "mdit-py-plugins" -version = "0.3.5" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/49/e7/cc2720da8a32724b36d04c6dba5644154cdf883a1482b3bbb81959a642ed/mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a", size = 39871 } +sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/4c/a9b222f045f98775034d243198212cbea36d3524c3ee1e8ab8c0346d6953/mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e", size = 52087 }, + { url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316 }, ] [[package]] @@ -930,7 +986,7 @@ wheels = [ [[package]] name = "myst-parser" -version = "1.0.0" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docutils" }, @@ -940,9 +996,9 @@ dependencies = [ { name = "pyyaml" }, { name = "sphinx" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5f/69/fbddb50198c6b0901a981e72ae30f1b7769d2dfac88071f7df41c946d133/myst-parser-1.0.0.tar.gz", hash = "sha256:502845659313099542bd38a2ae62f01360e7dd4b1310f025dd014dfc0439cdae", size = 84224 } +sdist = { url = "https://files.pythonhosted.org/packages/85/55/6d1741a1780e5e65038b74bce6689da15f620261c490c3511eb4c12bac4b/myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531", size = 93858 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/1f/1621ef434ac5da26c30d31fcca6d588e3383344902941713640ba717fa87/myst_parser-1.0.0-py3-none-any.whl", hash = "sha256:69fb40a586c6fa68995e6521ac0a525793935db7e724ca9bac1d33be51be9a4c", size = 77312 }, + { url = "https://files.pythonhosted.org/packages/ca/b4/b036f8fdb667587bb37df29dc6644681dd78b7a2a6321a34684b79412b28/myst_parser-4.0.0-py3-none-any.whl", hash = 
"sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d", size = 84563 }, ] [[package]] @@ -1001,7 +1057,7 @@ wheels = [ [[package]] name = "pre-commit" -version = "3.5.0" +version = "4.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cfgv" }, @@ -1010,9 +1066,9 @@ dependencies = [ { name = "pyyaml" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/b3/4ae08d21eb097162f5aad37f4585f8069a86402ed7f5362cc9ae097f9572/pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32", size = 177079 } +sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/75/526915fedf462e05eeb1c75ceaf7e3f9cde7b5ce6f62740fe5f7f19a0050/pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660", size = 203698 }, + { url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 }, ] [[package]] @@ -1107,21 +1163,28 @@ wheels = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.19" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/ab/67eda485b025e9253cce0eaede9b6158a08f62af7013a883b2c8775917b2/pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6", size = 349141 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/2d/df30554721cdad26b241b7a92e726dd1c3716d90c92915731eb00e17a9f7/pydantic-1.10.19.tar.gz", hash = "sha256:fea36c2065b7a1d28c6819cc2e93387b43dd5d3cf5a1e82d8132ee23f36d1f10", size = 355208 } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/42/47f16b1d3f6cf2c3a8e6b8c63680d38d25144803426eb893665a6384bedd/pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7", size = 2832515 }, - { url = "https://files.pythonhosted.org/packages/4b/44/439860148466c6a541a2916fc379a5730b16ef3c7d433e30a6041d36d7bb/pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b", size = 2497474 }, - { url = "https://files.pythonhosted.org/packages/1d/99/128bae7beff5cd5636f41a49bc1f58e5aaeb186d5f674ae4b2eb88608127/pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663", size = 3077188 }, - { url = "https://files.pythonhosted.org/packages/da/dd/dff4860e552dbf84b8525e291617408a3ee32024f93147e74a989b5977ee/pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f", size = 3109064 }, - { url = "https://files.pythonhosted.org/packages/4b/75/56c04c68c364cdb6dbb534b5f4ef032b802a892841070f8139bd6f6c9935/pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046", 
size = 3153833 }, - { url = "https://files.pythonhosted.org/packages/c9/79/f25ee40671ddf76219d38bea0bdee63bdae09cb89cff61cb67c04db58ffd/pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca", size = 3103044 }, - { url = "https://files.pythonhosted.org/packages/18/9c/c84ead4e65e85dbb3b9806e8390db91b82993f5248fdfe9dacdd4da9c726/pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f", size = 2110889 }, - { url = "https://files.pythonhosted.org/packages/b6/5d/4ec16c2158b934ce2b082073cea5e90bbdb76172050dc565425a0a76beec/pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c", size = 158858 }, + { url = "https://files.pythonhosted.org/packages/d9/e7/c3276090605233eeda49e3f290ef6e8dc59962f883fa7934455996986d67/pydantic-1.10.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7a8a1dd68bac29f08f0a3147de1885f4dccec35d4ea926e6e637fac03cdb4b3", size = 2582314 }, + { url = "https://files.pythonhosted.org/packages/79/4c/fea1176272425a1b972db48b5b2582165095f22d88d4a249f02439dcd3e5/pydantic-1.10.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07d00ca5ef0de65dd274005433ce2bb623730271d495a7d190a91c19c5679d34", size = 2269582 }, + { url = "https://files.pythonhosted.org/packages/85/e5/34b62732fa683d1171be07fb40f0bab3fb35bc52e56bfcae1629aee236c4/pydantic-1.10.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad57004e5d73aee36f1e25e4e73a4bc853b473a1c30f652dc8d86b0a987ffce3", size = 3088988 }, + { url = "https://files.pythonhosted.org/packages/f5/23/be131d6162cd2c4f7f29cf0a881c0e9bdbf7c37010803f8a85010bf016bf/pydantic-1.10.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dce355fe7ae53e3090f7f5fa242423c3a7b53260747aa398b4b3aaf8b25f41c3", size = 3120098 }, + { url = "https://files.pythonhosted.org/packages/f1/72/7cf7dfc8e68098751a5cee8969a967dad2acf9ce460963d071296bdeee81/pydantic-1.10.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0d32227ea9a3bf537a2273fd2fdb6d64ab4d9b83acd9e4e09310a777baaabb98", size = 3164823 }, + { url = "https://files.pythonhosted.org/packages/43/09/c7eb4c39faf7f01ebaed3fae8bf0b31388f2f7ffcefb07b2e5b9ea0f0617/pydantic-1.10.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e351df83d1c9cffa53d4e779009a093be70f1d5c6bb7068584086f6a19042526", size = 3115412 }, + { url = "https://files.pythonhosted.org/packages/80/a7/f9ecaaf940193a68d9566e2e61edce5f57d75591e59ff07a6af5fa7fb56f/pydantic-1.10.19-cp311-cp311-win_amd64.whl", hash = "sha256:d8d72553d2f3f57ce547de4fa7dc8e3859927784ab2c88343f1fc1360ff17a08", size = 2119019 }, + { url = "https://files.pythonhosted.org/packages/dc/bb/4883d3957b10b814b3bd7b7e8d51274f756e243e5eebd2f1cda36d933a32/pydantic-1.10.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d5b5b7c6bafaef90cbb7dafcb225b763edd71d9e22489647ee7df49d6d341890", size = 2410108 }, + { url = "https://files.pythonhosted.org/packages/86/80/752f888be6b068727fb893d4d875ef1cc6bb3ed3dc382f33a019fc26598a/pydantic-1.10.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:570ad0aeaf98b5e33ff41af75aba2ef6604ee25ce0431ecd734a28e74a208555", size = 2166105 }, + { url = "https://files.pythonhosted.org/packages/af/d4/346e56049cbc5ca429a1590bd0ab47cc154b1dec9e85fc920f7d5e50c889/pydantic-1.10.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0890fbd7fec9e151c7512941243d830b2d6076d5df159a2030952d480ab80a4e", size = 2800443 }, + { url = "https://files.pythonhosted.org/packages/c5/73/e1934973bf8bf436f1e1e365ed48dc51da5d7ba8b88dcd2239c962a267e0/pydantic-1.10.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec5c44e6e9eac5128a9bfd21610df3b8c6b17343285cc185105686888dc81206", size = 2830897 }, + { url = "https://files.pythonhosted.org/packages/87/5e/90e3e3c8bd70012986c22aa5f291aab948bdf419ca694833872594ff99ea/pydantic-1.10.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6eb56074b11a696e0b66c7181da682e88c00e5cebe6570af8013fcae5e63e186", size = 2863147 }, + { url = "https://files.pythonhosted.org/packages/50/c2/95be3fdfafdaf49d09369a46bbcf1f22494765479b44436e954837b818cc/pydantic-1.10.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d7d48fbc5289efd23982a0d68e973a1f37d49064ccd36d86de4543aff21e086", size = 2823662 }, + { url = "https://files.pythonhosted.org/packages/00/ba/8b1c91cc27428b5e9d340abf99f82c01987eb638937ff64ae0166354bff0/pydantic-1.10.19-cp312-cp312-win_amd64.whl", hash = "sha256:fd34012691fbd4e67bdf4accb1f0682342101015b78327eaae3543583fcd451e", size = 1950372 }, + { url = "https://files.pythonhosted.org/packages/a4/68/99ebf43b6b0321175cff0a05f0ce7fa51a8de67d390ccb8ab0d534be86a9/pydantic-1.10.19-py3-none-any.whl", hash = "sha256:2206a1752d9fac011e95ca83926a269fb0ef5536f7e053966d058316e24d929f", size = 165863 }, ] [[package]] @@ -1185,15 +1248,15 @@ wheels = [ [[package]] name = "pytest-cov" -version = "5.0.0" +version = "6.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042 } +sdist = { url = "https://files.pythonhosted.org/packages/be/45/9b538de8cef30e17c7b45ef42f538a94889ed6a16f2387a6c89e73220651/pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0", size = 66945 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990 }, + { url = "https://files.pythonhosted.org/packages/36/3b/48e79f2cd6a61dbbd4807b4ed46cb564b4fd50a76166b1c4ea5c1d9e2371/pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", size = 22949 }, ] [[package]] @@ -1259,25 +1322,37 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/e5/af35f7ea75cf72f2cd079c95ee16797de7cd71f29ea7c68ae5ce7be1eda0/PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", size = 125201 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/0d/26fb23e8863e0aeaac0c64e03fd27367ad2ae3f3cccf3798ee98ce160368/PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007", size = 187867 }, - { url = 
"https://files.pythonhosted.org/packages/28/09/55f715ddbf95a054b764b547f617e22f1d5e45d83905660e9a088078fe67/PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab", size = 167530 }, - { url = "https://files.pythonhosted.org/packages/5e/94/7d5ee059dfb92ca9e62f4057dcdec9ac08a9e42679644854dc01177f8145/PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d", size = 732244 }, - { url = "https://files.pythonhosted.org/packages/06/92/e0224aa6ebf9dc54a06a4609da37da40bb08d126f5535d81bff6b417b2ae/PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc", size = 752871 }, - { url = "https://files.pythonhosted.org/packages/7b/5e/efd033ab7199a0b2044dab3b9f7a4f6670e6a52c089de572e928d2873b06/PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673", size = 757729 }, - { url = "https://files.pythonhosted.org/packages/03/5c/c4671451b2f1d76ebe352c0945d4cd13500adb5d05f5a51ee296d80152f7/PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b", size = 748528 }, - { url = "https://files.pythonhosted.org/packages/73/9c/766e78d1efc0d1fca637a6b62cea1b4510a7fb93617eb805223294fef681/PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741", size = 130286 }, - { url = "https://files.pythonhosted.org/packages/b3/34/65bb4b2d7908044963ebf614fe0fdb080773fc7030d7e39c8d3eddcd4257/PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34", size = 144699 }, - { url = "https://files.pythonhosted.org/packages/bc/06/1b305bf6aa704343be85444c9d011f626c763abb40c0edc1cad13bfd7f86/PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28", size = 178692 }, - { url = "https://files.pythonhosted.org/packages/84/02/404de95ced348b73dd84f70e15a41843d817ff8c1744516bf78358f2ffd2/PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9", size = 165622 }, - { url = "https://files.pythonhosted.org/packages/c7/4c/4a2908632fc980da6d918b9de9c1d9d7d7e70b2672b1ad5166ed27841ef7/PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef", size = 696937 }, - { url = "https://files.pythonhosted.org/packages/b4/33/720548182ffa8344418126017aa1d4ab4aeec9a2275f04ce3f3573d8ace8/PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0", size = 724969 }, - { url = "https://files.pythonhosted.org/packages/4f/78/77b40157b6cb5f2d3d31a3d9b2efd1ba3505371f76730d267e8b32cf4b7f/PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4", size = 712604 }, - { url = "https://files.pythonhosted.org/packages/2e/97/3e0e089ee85e840f4b15bfa00e4e63d84a3691ababbfea92d6f820ea6f21/PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54", size = 126098 }, - { 
url = "https://files.pythonhosted.org/packages/2b/9f/fbade56564ad486809c27b322d0f7e6a89c01f6b4fe208402e90d4443a99/PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df", size = 138675 }, +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = 
"https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, ] [[package]] @@ -1295,7 +1370,7 @@ wheels = [ [[package]] name = "requests" -version = "2.29.0" +version = "2.32.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1303,9 +1378,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/d2/70fc708727b62d55bc24e43cc85f073039023212d482553d853c44e57bdb/requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059", size = 108279 } +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/e1/2aa539876d9ed0ddc95882451deb57cfd7aa8dbf0b8dbce68e045549ba56/requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b", size = 62499 }, + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, ] [[package]] @@ -1322,17 +1397,16 @@ wheels = [ [[package]] name = "responses" -version = "0.23.1" +version = "0.25.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyyaml" }, { name = "requests" }, - { name = "types-pyyaml" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/4f/5033bf66528c832e7fcc48e76f540bf401302c55041c7fb488b4fbaaec4a/responses-0.23.1.tar.gz", hash = "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f", size = 72966 } +sdist = { url = "https://files.pythonhosted.org/packages/67/24/1d67c8974daa502e860b4a5b57ad6de0d7dbc0b1160ef7148189a24a40e1/responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba", size = 77798 } wheels = [ - { url = "https://files.pythonhosted.org/packages/72/6a/64c85e69c6a7b02e828ed193b2fc15e3ff6581f87501666b98feabc54809/responses-0.23.1-py3-none-any.whl", hash = "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd", size = 52083 }, + { url = "https://files.pythonhosted.org/packages/12/24/93293d0be0db9da1ed8dfc5e6af700fdd40e8f10a928704dd179db9f03c1/responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb", size = 55238 }, ] [[package]] @@ -1418,49 +1492,49 @@ wheels = [ [[package]] name = "rpds-py" -version = 
"0.20.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/25/cb/8e919951f55d109d658f81c9b49d0cc3b48637c50792c5d2e77032b8c5da/rpds_py-0.20.1.tar.gz", hash = "sha256:e1791c4aabd117653530dccd24108fa03cc6baf21f58b950d0a73c3b3b29a350", size = 25931 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/2e/a6ded84019a05b8f23e0fe6a632f62ae438a8c5e5932d3dfc90c73418414/rpds_py-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:762703bdd2b30983c1d9e62b4c88664df4a8a4d5ec0e9253b0231171f18f6d75", size = 327194 }, - { url = "https://files.pythonhosted.org/packages/68/11/d3f84c69de2b2086be3d6bd5e9d172825c096b13842ab7e5f8f39f06035b/rpds_py-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b581f47257a9fce535c4567782a8976002d6b8afa2c39ff616edf87cbeff712", size = 318126 }, - { url = "https://files.pythonhosted.org/packages/18/c0/13f1bce9c901511e5e4c0b77a99dbb946bb9a177ca88c6b480e9cb53e304/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:842c19a6ce894493563c3bd00d81d5100e8e57d70209e84d5491940fdb8b9e3a", size = 361119 }, - { url = "https://files.pythonhosted.org/packages/06/31/3bd721575671f22a37476c2d7b9e34bfa5185bdcee09f7fedde3b29f3adb/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42cbde7789f5c0bcd6816cb29808e36c01b960fb5d29f11e052215aa85497c93", size = 369532 }, - { url = "https://files.pythonhosted.org/packages/20/22/3eeb0385f33251b4fd0f728e6a3801dc8acc05e714eb7867cefe635bf4ab/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c8e9340ce5a52f95fa7d3b552b35c7e8f3874d74a03a8a69279fd5fca5dc751", size = 403703 }, - { url = "https://files.pythonhosted.org/packages/10/e1/8dde6174e7ac5b9acd3269afca2e17719bc7e5088c68f44874d2ad9e4560/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba6f89cac95c0900d932c9efb7f0fb6ca47f6687feec41abcb1bd5e2bd45535", size = 429868 }, - { url = "https://files.pythonhosted.org/packages/19/51/a3cc1a5238acfc2582033e8934d034301f9d4931b9bf7c7ccfabc4ca0880/rpds_py-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a916087371afd9648e1962e67403c53f9c49ca47b9680adbeef79da3a7811b0", size = 360539 }, - { url = "https://files.pythonhosted.org/packages/cd/8c/3c87471a44bd4114e2b0aec90f298f6caaac4e8db6af904d5dd2279f5c61/rpds_py-0.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:200a23239781f46149e6a415f1e870c5ef1e712939fe8fa63035cd053ac2638e", size = 382467 }, - { url = "https://files.pythonhosted.org/packages/d0/9b/95073fe3e0f130e6d561e106818b6568ef1f2df3352e7f162ab912da837c/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58b1d5dd591973d426cbb2da5e27ba0339209832b2f3315928c9790e13f159e8", size = 546669 }, - { url = "https://files.pythonhosted.org/packages/de/4c/7ab3669e02bb06fedebcfd64d361b7168ba39dfdf385e4109440f2e7927b/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6b73c67850ca7cae0f6c56f71e356d7e9fa25958d3e18a64927c2d930859b8e4", size = 549304 }, - { url = "https://files.pythonhosted.org/packages/f1/e8/ad5da336cd42adbdafe0ecd40dcecdae01fd3d703c621c7637615a008d3a/rpds_py-0.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8761c3c891cc51e90bc9926d6d2f59b27beaf86c74622c8979380a29cc23ac3", size = 527637 }, - { url = 
"https://files.pythonhosted.org/packages/02/f1/1b47b9e5b941c2659c9b7e4ef41b6f07385a6500c638fa10c066e4616ecb/rpds_py-0.20.1-cp311-none-win32.whl", hash = "sha256:cd945871335a639275eee904caef90041568ce3b42f402c6959b460d25ae8732", size = 200488 }, - { url = "https://files.pythonhosted.org/packages/85/f6/c751c1adfa31610055acfa1cc667cf2c2d7011a73070679c448cf5856905/rpds_py-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:7e21b7031e17c6b0e445f42ccc77f79a97e2687023c5746bfb7a9e45e0921b84", size = 218475 }, - { url = "https://files.pythonhosted.org/packages/e7/10/4e8dcc08b58a548098dbcee67a4888751a25be7a6dde0a83d4300df48bfa/rpds_py-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:36785be22066966a27348444b40389f8444671630063edfb1a2eb04318721e17", size = 329749 }, - { url = "https://files.pythonhosted.org/packages/d2/e4/61144f3790e12fd89e6153d77f7915ad26779735fef8ee9c099cba6dfb4a/rpds_py-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:142c0a5124d9bd0e2976089484af5c74f47bd3298f2ed651ef54ea728d2ea42c", size = 321032 }, - { url = "https://files.pythonhosted.org/packages/fa/e0/99205aabbf3be29ef6c58ef9b08feed51ba6532fdd47461245cb58dd9897/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbddc10776ca7ebf2a299c41a4dde8ea0d8e3547bfd731cb87af2e8f5bf8962d", size = 363931 }, - { url = "https://files.pythonhosted.org/packages/ac/bd/bce2dddb518b13a7e77eed4be234c9af0c9c6d403d01c5e6ae8eb447ab62/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15a842bb369e00295392e7ce192de9dcbf136954614124a667f9f9f17d6a216f", size = 373343 }, - { url = "https://files.pythonhosted.org/packages/43/15/112b7c553066cb91264691ba7fb119579c440a0ae889da222fa6fc0d411a/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be5ef2f1fc586a7372bfc355986226484e06d1dc4f9402539872c8bb99e34b01", size = 406304 }, - { url = "https://files.pythonhosted.org/packages/af/8d/2da52aef8ae5494a382b0c0025ba5b68f2952db0f2a4c7534580e8ca83cc/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbcf360c9e3399b056a238523146ea77eeb2a596ce263b8814c900263e46031a", size = 423022 }, - { url = "https://files.pythonhosted.org/packages/c8/1b/f23015cb293927c93bdb4b94a48bfe77ad9d57359c75db51f0ff0cf482ff/rpds_py-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd27a66740ffd621d20b9a2f2b5ee4129a56e27bfb9458a3bcc2e45794c96cb", size = 364937 }, - { url = "https://files.pythonhosted.org/packages/7b/8b/6da8636b2ea2e2f709e56656e663b6a71ecd9a9f9d9dc21488aade122026/rpds_py-0.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0b937b2a1988f184a3e9e577adaa8aede21ec0b38320d6009e02bd026db04fa", size = 386301 }, - { url = "https://files.pythonhosted.org/packages/20/af/2ae192797bffd0d6d558145b5a36e7245346ff3e44f6ddcb82f0eb8512d4/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6889469bfdc1eddf489729b471303739bf04555bb151fe8875931f8564309afc", size = 549452 }, - { url = "https://files.pythonhosted.org/packages/07/dd/9f6520712a5108cd7d407c9db44a3d59011b385c58e320d58ebf67757a9e/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19b73643c802f4eaf13d97f7855d0fb527fbc92ab7013c4ad0e13a6ae0ed23bd", size = 554370 }, - { url = "https://files.pythonhosted.org/packages/5e/0e/b1bdc7ea0db0946d640ab8965146099093391bb5d265832994c47461e3c5/rpds_py-0.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:3c6afcf2338e7f374e8edc765c79fbcb4061d02b15dd5f8f314a4af2bdc7feb5", size = 530940 }, - { url = "https://files.pythonhosted.org/packages/ae/d3/ffe907084299484fab60a7955f7c0e8a295c04249090218c59437010f9f4/rpds_py-0.20.1-cp312-none-win32.whl", hash = "sha256:dc73505153798c6f74854aba69cc75953888cf9866465196889c7cdd351e720c", size = 203164 }, - { url = "https://files.pythonhosted.org/packages/1f/ba/9cbb57423c4bfbd81c473913bebaed151ad4158ee2590a4e4b3e70238b48/rpds_py-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:8bbe951244a838a51289ee53a6bae3a07f26d4e179b96fc7ddd3301caf0518eb", size = 220750 }, - { url = "https://files.pythonhosted.org/packages/b5/01/fee2e1d1274c92fff04aa47d805a28d62c2aa971d1f49f5baea1c6e670d9/rpds_py-0.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6ca91093a4a8da4afae7fe6a222c3b53ee4eef433ebfee4d54978a103435159e", size = 329359 }, - { url = "https://files.pythonhosted.org/packages/b0/cf/4aeffb02b7090029d7aeecbffb9a10e1c80f6f56d7e9a30e15481dc4099c/rpds_py-0.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b9c2fe36d1f758b28121bef29ed1dee9b7a2453e997528e7d1ac99b94892527c", size = 320543 }, - { url = "https://files.pythonhosted.org/packages/17/69/85cf3429e9ccda684ba63ff36b5866d5f9451e921cc99819341e19880334/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f009c69bc8c53db5dfab72ac760895dc1f2bc1b62ab7408b253c8d1ec52459fc", size = 363107 }, - { url = "https://files.pythonhosted.org/packages/ef/de/7df88dea9c3eeb832196d23b41f0f6fc5f9a2ee9b2080bbb1db8731ead9c/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6740a3e8d43a32629bb9b009017ea5b9e713b7210ba48ac8d4cb6d99d86c8ee8", size = 372027 }, - { url = "https://files.pythonhosted.org/packages/d1/b8/88675399d2038580743c570a809c43a900e7090edc6553f8ffb66b23c965/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32b922e13d4c0080d03e7b62991ad7f5007d9cd74e239c4b16bc85ae8b70252d", size = 405031 }, - { url = "https://files.pythonhosted.org/packages/e1/aa/cca639f6d17caf00bab51bdc70fcc0bdda3063e5662665c4fdf60443c474/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe00a9057d100e69b4ae4a094203a708d65b0f345ed546fdef86498bf5390982", size = 422271 }, - { url = "https://files.pythonhosted.org/packages/c4/07/bf8a949d2ec4626c285579c9d6b356c692325f1a4126e947736b416e1fc4/rpds_py-0.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fe9b04b6fa685bd39237d45fad89ba19e9163a1ccaa16611a812e682913496", size = 363625 }, - { url = "https://files.pythonhosted.org/packages/11/f0/06675c6a58d6ce34547879138810eb9aab0c10e5607ea6c2e4dc56b703c8/rpds_py-0.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa7ac11e294304e615b43f8c441fee5d40094275ed7311f3420d805fde9b07b4", size = 385906 }, - { url = "https://files.pythonhosted.org/packages/bf/ac/2d1f50374eb8e41030fad4e87f81751e1c39e3b5d4bee8c5618830d8a6ac/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aa97af1558a9bef4025f8f5d8c60d712e0a3b13a2fe875511defc6ee77a1ab7", size = 549021 }, - { url = "https://files.pythonhosted.org/packages/f7/d4/a7d70a7cc71df772eeadf4bce05e32e780a9fe44a511a5b091c7a85cb767/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:483b29f6f7ffa6af845107d4efe2e3fa8fb2693de8657bc1849f674296ff6a5a", size = 553800 }, - { url = 
"https://files.pythonhosted.org/packages/87/81/dc30bc449ccba63ad23a0f6633486d4e0e6955f45f3715a130dacabd6ad0/rpds_py-0.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37fe0f12aebb6a0e3e17bb4cd356b1286d2d18d2e93b2d39fe647138458b4bcb", size = 531076 }, - { url = "https://files.pythonhosted.org/packages/50/80/fb62ab48f3b5cfe704ead6ad372da1922ddaa76397055e02eb507054c979/rpds_py-0.20.1-cp313-none-win32.whl", hash = "sha256:a624cc00ef2158e04188df5e3016385b9353638139a06fb77057b3498f794782", size = 202804 }, - { url = "https://files.pythonhosted.org/packages/d9/30/a3391e76d0b3313f33bdedd394a519decae3a953d2943e3dabf80ae32447/rpds_py-0.20.1-cp313-none-win_amd64.whl", hash = "sha256:b71b8666eeea69d6363248822078c075bac6ed135faa9216aa85f295ff009b1e", size = 220502 }, +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/80/afdf96daf9b27d61483ef05b38f282121db0e38f5fd4e89f40f5c86c2a4f/rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db", size = 26335 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/61/615929ea79f5fd0b3aca000411a33bcc1753607ccc1af0ce7b05b56e6e56/rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95", size = 327267 }, + { url = "https://files.pythonhosted.org/packages/a5/f5/28e89dda55b731d78cbfea284dc9789d265a8a06523f0adf60e9b05cade7/rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9", size = 318227 }, + { url = "https://files.pythonhosted.org/packages/e4/ef/eb90feb3e384543c48e2f867551075c43a429aa4c9a44e9c4bd71f4f786b/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027", size = 361235 }, + { url = "https://files.pythonhosted.org/packages/ed/e7/8ea2d3d3398266c5c8ddd957d86003493b6d14f8f158b726dd09c8f43dee/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9", size = 369467 }, + { url = "https://files.pythonhosted.org/packages/51/25/a286abda9da7820c971a0b1abcf1d31fb81c44a1088a128ad26c77206622/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3", size = 403482 }, + { url = "https://files.pythonhosted.org/packages/7a/1e/9c3c0463fe142456dcd9e9be0ffd15b66a77adfcdf3ecf94fa2b12d95fcb/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8", size = 429943 }, + { url = "https://files.pythonhosted.org/packages/e1/fd/f1fd7e77fef8e5a442ce7fd80ba957730877515fe18d7195f646408a60ce/rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d", size = 360437 }, + { url = "https://files.pythonhosted.org/packages/55/83/347932db075847f4f8172c3b53ad70fe725edd9058f0d4098080ad45e3bc/rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75", size = 382400 }, + { url = 
"https://files.pythonhosted.org/packages/22/9b/2a6eeab4e6752adba751cfee19bdf35d11e1073509f74883cbf14d42d682/rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f", size = 546560 }, + { url = "https://files.pythonhosted.org/packages/3c/19/6e51a141fe6f017d07b7d899b10a4af9e0f268deffacc1107d70fcd9257b/rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a", size = 549334 }, + { url = "https://files.pythonhosted.org/packages/cf/40/4ae09a07e4531278e6bee41ef3e4f166c23468135afc2c6c98917bfc28e6/rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8", size = 527855 }, + { url = "https://files.pythonhosted.org/packages/eb/45/2135be31543677687a426117c56d8b33e8b581bc4a8b7abfa53721012162/rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a", size = 200968 }, + { url = "https://files.pythonhosted.org/packages/68/fa/e66c3aaf13ef91c203ba47c102cd7c5dca92dde8837e5093577968d6d36d/rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e", size = 218502 }, + { url = "https://files.pythonhosted.org/packages/d9/5a/3aa6f5d8bacbe4f55ebf9a3c9628dad40cdb57f845124cf13c78895ea156/rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d", size = 329516 }, + { url = "https://files.pythonhosted.org/packages/df/c0/67c8c8ac850c6e3681e356a59d46315bf73bc77cb50c9a32db8ae44325b7/rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72", size = 321245 }, + { url = "https://files.pythonhosted.org/packages/64/83/bf31341f21fa594035891ff04a497dc86b210cc1a903a9cc01b097cc614f/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266", size = 363951 }, + { url = "https://files.pythonhosted.org/packages/a2/e1/8218bba36737621262df316fbb729639af25ff611cc07bfeaadc1bfa6292/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be", size = 373113 }, + { url = "https://files.pythonhosted.org/packages/39/8d/4afcd688e3ad33ec273900f42e6a41e9bd9f43cfc509b6d498683d2d0338/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab", size = 405944 }, + { url = "https://files.pythonhosted.org/packages/fa/65/3326efa721b6ecd70262aab69a26c9bc19398cdb0a2a416ef30b58326460/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7", size = 422874 }, + { url = "https://files.pythonhosted.org/packages/31/fb/48a647d0afab74289dd21a4128002d58684c22600a22c4bfb76cb9e3bfb0/rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf", size = 364227 }, + { url = "https://files.pythonhosted.org/packages/f1/b0/1cdd179d7382dd52d65b1fd19c54d090b6bd0688dfbe259bb5ab7548c359/rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4", size = 386447 }, + { url = "https://files.pythonhosted.org/packages/dc/41/84ace07f31aac3a96b73a374d89106cf252f7d3274e7cae85d17a27c602d/rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca", size = 549386 }, + { url = "https://files.pythonhosted.org/packages/33/ce/bf51bc5a3aa539171ea8c7737ab5ac06cef54c79b6b2a0511afc41533c89/rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b", size = 554777 }, + { url = "https://files.pythonhosted.org/packages/76/b1/950568e55a94c2979c2b61ec24e76e648a525fbc7551ccfc1f2841e39d44/rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11", size = 530918 }, + { url = "https://files.pythonhosted.org/packages/78/84/93f00e3613426c8a7a9ca16782d2828f2ac55296dd5c6b599379d9f59ee2/rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952", size = 203112 }, + { url = "https://files.pythonhosted.org/packages/e6/08/7a186847dd78881a781d2be9b42c8e49c3261c0f4a6d0289ba9a1e4cde71/rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd", size = 220735 }, + { url = "https://files.pythonhosted.org/packages/32/3a/e69ec108eefb9b1f19ee00dde7a800b485942e62b123f01d9156a6d8569c/rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937", size = 329206 }, + { url = "https://files.pythonhosted.org/packages/f6/c0/fa689498fa3415565306398c8d2a596207c2a13d3cc03724f32514bddfbc/rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560", size = 320245 }, + { url = "https://files.pythonhosted.org/packages/68/d0/466b61007005f1b2fd8501f23e4bdee4d71c7381b61358750920d1882ac9/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b", size = 363585 }, + { url = "https://files.pythonhosted.org/packages/1e/e2/787ea3a0f4b197893c62c254e6f14929c40bbcff86922928ac4eafaa8edf/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0", size = 372302 }, + { url = "https://files.pythonhosted.org/packages/b5/ef/99f2cfe6aa128c21f1b30c66ecd348cbd59792953ca35eeb6efa38b88aa1/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44", size = 405344 }, + { url = "https://files.pythonhosted.org/packages/30/3c/9d12d0b76ecfe80a7ba4770459828dda495d72b18cafd6dfd54c67b2e282/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74", size = 422322 }, + { url = "https://files.pythonhosted.org/packages/f9/22/387aec1cd6e124adbc3b1f40c4e4152c3963ae47d78d3ca650102ea72c4f/rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94", size = 363739 }, + { url = 
"https://files.pythonhosted.org/packages/d1/3e/0ad65b776db13d13f002ab363fe3821cd1adec500d8e05e0a81047a75f9d/rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3", size = 386579 }, + { url = "https://files.pythonhosted.org/packages/4f/3b/c68c1067b24a7df47edcc0325a825908601aba399e2d372a156edc631ad1/rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a", size = 548924 }, + { url = "https://files.pythonhosted.org/packages/ab/1c/35f1a5cce4bca71c49664f00140010a96b126e5f443ebaf6db741c25b9b7/rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3", size = 554217 }, + { url = "https://files.pythonhosted.org/packages/c8/d0/48154c152f9adb8304b21d867d28e79be3b352633fb195c03c7107a4da9a/rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976", size = 530540 }, + { url = "https://files.pythonhosted.org/packages/50/e8/78847f4e112e99fd5b7bc30fea3e4a44c20b811473d6755f944c5bf0aec7/rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202", size = 202604 }, + { url = "https://files.pythonhosted.org/packages/60/31/083e6337775e133fb0217ed0ab0752380efa6e5112f2250d592d4135a228/rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e", size = 220448 }, ] [[package]] @@ -1520,11 +1594,11 @@ wheels = [ [[package]] name = "setuptools" -version = "75.4.0" +version = "75.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e2/73/c1ccf3e057ef6331cc6861412905dc218203bde46dfe8262c1631aa7fb11/setuptools-75.4.0.tar.gz", hash = "sha256:1dc484f5cf56fd3fe7216d7b8df820802e7246cfb534a1db2aa64f14fcb9cdcb", size = 1336593 } +sdist = { url = "https://files.pythonhosted.org/packages/c8/db/722a42ffdc226e950c4757b3da7b56ff5c090bb265dccd707f7b8a3c6fee/setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef", size = 1336032 } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/df/7c6bb83dcb45b35dc35b310d752f254211cde0bcd2a35290ea6e2862b2a9/setuptools-75.4.0-py3-none-any.whl", hash = "sha256:b3c5d862f98500b06ffdf7cc4499b48c46c317d8d56cb30b5c8bce4d88f5c216", size = 1223131 }, + { url = "https://files.pythonhosted.org/packages/fe/df/88ccbee85aefbca071db004fdc8f8d2507d55d5a9dc27ebb93c92edb1bd8/setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829", size = 1222710 }, ] [[package]] @@ -1561,7 +1635,7 @@ wheels = [ [[package]] name = "snowfakery" version = "4.0.0" -source = { directory = "../Snowfakery" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "faker" }, @@ -1577,41 +1651,9 @@ dependencies = [ { name = "setuptools" }, { name = "sqlalchemy" }, ] - -[package.metadata] -requires-dist = [ - { name = "click" }, - { name = "faker" }, - { name = "faker-edu" }, - { name = "faker-nonprofit" }, - { name = "gvgen" }, - { name = "jinja2" }, - { name = "pydantic", specifier = "<2.0.0" }, - { name = "python-baseconv" }, - { name = "python-dateutil" }, - { name = "pyyaml" }, - { name = "requests" }, - { name = "setuptools", specifier = ">=75.4.0" }, - { name = 
"sqlalchemy", specifier = "<3" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "black", specifier = ">=24.10.0" }, - { name = "coverage", specifier = ">=7.6.4" }, - { name = "coveralls", specifier = ">=4.0.1" }, - { name = "diff-cover", specifier = ">=9.2.0" }, - { name = "faker-microservice", specifier = ">=2.0.0" }, - { name = "jsonschema", specifier = ">=4.23.0" }, - { name = "mkdocs", specifier = ">=1.6.1" }, - { name = "mkdocs-exclude-search", specifier = ">=0.6.6" }, - { name = "pre-commit", specifier = ">=4.0.1" }, - { name = "pyright", specifier = ">=1.1.388" }, - { name = "pytest", specifier = ">=8.3.3" }, - { name = "pytest-cov", specifier = ">=6.0.0" }, - { name = "pytest-vcr", specifier = ">=1.0.2" }, - { name = "responses", specifier = ">=0.23.1" }, - { name = "vcrpy", specifier = ">=6.0.2" }, +sdist = { url = "https://files.pythonhosted.org/packages/69/94/51848ad67a409e2b8d37e10277e4ee43b8c982a47fd6e9bb114f427374b0/snowfakery-4.0.0.tar.gz", hash = "sha256:95b4a5add5b7e8483fcbf567e3b83ec7418031ce8a00fdc8542c906ec5392d91", size = 76039 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/dd/6da304a468b69f036c2185a116cf4840e36b510e65c22bebb07abeec78fd/snowfakery-4.0.0-py3-none-any.whl", hash = "sha256:38ed1faec5839d45454a2ecf0a64ec4cb352662e84694b204866a0e9dedc1a52", size = 100733 }, ] [[package]] @@ -1625,7 +1667,7 @@ wheels = [ [[package]] name = "sphinx" -version = "5.3.0" +version = "8.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alabaster" }, @@ -1645,9 +1687,9 @@ dependencies = [ { name = "sphinxcontrib-qthelp" }, { name = "sphinxcontrib-serializinghtml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/b2/02a43597980903483fe5eb081ee8e0ba2bb62ea43a70499484343795f3bf/Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5", size = 6811365 } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611 } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/a7/01dd6fd9653c056258d65032aa09a615b5d7b07dd840845a9f41a8860fbc/sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d", size = 3183160 }, + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125 }, ] [[package]] @@ -1664,29 +1706,29 @@ wheels = [ [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.4" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/32/df/45e827f4d7e7fcc84e853bcef1d836effd762d63ccb86f43ede4e98b478c/sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e", size = 24766 } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053 } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/c1/5e2cafbd03105ce50d8500f9b4e8a6e8d02e22d0475b574c3b3e9451a15f/sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", 
hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228", size = 120601 }, + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300 }, ] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.2" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/33/dc28393f16385f722c893cb55539c641c9aaec8d1bc1c15b69ce0ac2dbb3/sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4", size = 17398 } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/09/5de5ed43a521387f18bdf5f5af31d099605c992fd25372b2b9b825ce48ee/sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e", size = 84690 }, + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530 }, ] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.1" +version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/47/64cff68ea3aa450c373301e5bebfbb9fce0a3e70aca245fcadd4af06cd75/sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff", size = 27967 } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/ee/a1f5e39046cbb5f8bc8fba87d1ddf1c6643fbc9194e58d26e606de4b9074/sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903", size = 99833 }, + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705 }, ] [[package]] @@ -1700,41 +1742,41 @@ wheels = [ [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.3" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/8e/c4846e59f38a5f2b4a0e3b27af38f2fcf904d4bfd82095bf92de0b114ebd/sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72", size = 21658 } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2b/14/05f9206cf4e9cfca1afb5fd224c7cd434dcc3a433d6d9e4e0264d29c6cdb/sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6", size = 90609 }, + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743 }, ] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.5" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/72/835d6fadb9e5d02304cf39b18f93d227cd93abd3c41ebf58e6853eeb1455/sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952", size = 21019 } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/77/5464ec50dd0f1c1037e3c93249b040c8fc8078fdda97530eeb02424b6eea/sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd", size = 94021 }, + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072 }, ] [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "1.4.54" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "(platform_machine == 'AMD64' and python_full_version >= '3.11') or (platform_machine == 'WIN32' and python_full_version >= '3.11') or (platform_machine == 'aarch64' and python_full_version >= '3.11') or (platform_machine == 'amd64' and python_full_version >= '3.11') or (platform_machine == 'ppc64le' and python_full_version >= '3.11') or (platform_machine == 'win32' and python_full_version >= '3.11') or (platform_machine == 'x86_64' and python_full_version >= '3.11')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8a/a4/b5991829c34af0505e0f2b1ccf9588d1ba90f2d984ee208c90c985f1265a/SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296", size = 8514200 } +sdist = { url = "https://files.pythonhosted.org/packages/ce/af/20290b55d469e873cba9d41c0206ab5461ff49d759989b3fe65010f9d265/sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a", size = 8470350 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/50/d5756b1faa3c727bc3e2601ee1975c00e6adbafbbc436bea1e87af86328d/SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d", size = 1588505 }, - { url = "https://files.pythonhosted.org/packages/81/05/73cb4865011f85fc3c4af8af06dc66b50527208f83c90b807071abba8da1/SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0", size = 1628161 }, - { url = 
"https://files.pythonhosted.org/packages/a9/5b/3afbd03f813b7ba929887d0d1107b54e7bad4e3a10664ab21a05eb777149/SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb", size = 1626252 }, - { url = "https://files.pythonhosted.org/packages/78/5b/9eda3191ff1b9e101addc4f67df6c4a2836569f36f2e0117abe362e65b33/SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e", size = 1589454 }, - { url = "https://files.pythonhosted.org/packages/a3/65/bede5ab82a258c4a55a5cbfacbfc74e3ca2b82e61085a815f919fa1660be/SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12", size = 1591519 }, - { url = "https://files.pythonhosted.org/packages/fc/30/7e04f16d0508d4e57edd5c8def5810bb31bc73203beacd8bf83ed18ff0f1/SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c", size = 1589216 }, - { url = "https://files.pythonhosted.org/packages/ce/e6/9da1e081321a514c0147a2e0b293f27ca93f0f299cbd5ba746a9422a9f07/SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554", size = 1628827 }, - { url = "https://files.pythonhosted.org/packages/10/c1/1613a8dcd05e6dacc9505554ce6c217a1cfda0da9c7592e258856945c6b6/SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b", size = 1627867 }, - { url = "https://files.pythonhosted.org/packages/0e/a7/97e7893673165b41dacfb07476df83a2fb5c9445feea5e54ad6ed3d27cb5/SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126", size = 1589871 }, - { url = "https://files.pythonhosted.org/packages/49/62/d0e4502e27eaa10da35243d5241c3be3ed3974d607281e3b4ccc065d9853/SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64", size = 1591783 }, + { url = "https://files.pythonhosted.org/packages/da/49/fb98983b5568e93696a25fd5bec1b789095b79a72d5f57c6effddaa81d0a/SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e", size = 1589301 }, + { url = "https://files.pythonhosted.org/packages/03/98/5a81430bbd646991346cb088a2bdc84d1bcd3dbe6b0cfc1aaa898370e5c7/SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936", size = 1629553 }, + { url = "https://files.pythonhosted.org/packages/f1/17/14e35db2b0d6deaa27691d014addbb0dd6f7e044f7ee465446a3c0c71404/SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f", size = 1627640 }, + { url = "https://files.pythonhosted.org/packages/98/62/335006a8f2c98f704f391e1a0cc01446d1b1b9c198f579f03599f55bd860/SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc", size = 1591723 }, + { url = 
"https://files.pythonhosted.org/packages/e2/a1/6b4b8c07082920f5445ec65c221fa33baab102aced5dcc2d87a15d3f8db4/SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5", size = 1593511 }, + { url = "https://files.pythonhosted.org/packages/a5/1b/aa9b99be95d1615f058b5827447c18505b7b3f1dfcbd6ce1b331c2107152/SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d", size = 1589983 }, + { url = "https://files.pythonhosted.org/packages/59/47/cb0fc64e5344f0a3d02216796c342525ab283f8f052d1c31a1d487d08aa0/SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4", size = 1630158 }, + { url = "https://files.pythonhosted.org/packages/c0/8b/f45dd378f6c97e8ff9332ff3d03ecb0b8c491be5bb7a698783b5a2f358ec/SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c", size = 1629232 }, + { url = "https://files.pythonhosted.org/packages/0d/3c/884fe389f5bec86a310b81e79abaa1e26e5d78dc10a84d544a6822833e47/SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f", size = 1592027 }, + { url = "https://files.pythonhosted.org/packages/01/c3/c690d037be57efd3a69cde16a2ef1bd2a905dafe869434d33836de0983d0/SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88", size = 1593827 }, ] [[package]] @@ -1748,16 +1790,16 @@ wheels = [ [[package]] name = "tomli" -version = "2.0.2" +version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/b9/de2a5c0144d7d75a57ff355c0c24054f965b2dc3036456ae03a51ea6264b/tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed", size = 16096 } +sdist = { url = "https://files.pythonhosted.org/packages/1e/e4/1b6cbcc82d8832dd0ce34767d5c560df8a3547ad8cbc427f34601415930a/tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8", size = 16622 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/db/ce8eda256fa131af12e0a76d481711abe4681b6923c27efb9a255c9e4594/tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", size = 13237 }, + { url = "https://files.pythonhosted.org/packages/de/f7/4da0ffe1892122c9ea096c57f64c2753ae5dd3ce85488802d11b0992cc6d/tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391", size = 13750 }, ] [[package]] name = "tox" -version = "4.20.0" +version = "4.23.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, @@ -1770,9 +1812,9 @@ dependencies = [ { name = "pyproject-api" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/4a/55f9dba99aad874ae54a7fb2310c940e978fd0155eb3576ddebec000fca7/tox-4.20.0.tar.gz", hash = "sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5", size = 181364 } +sdist = { url = "https://files.pythonhosted.org/packages/1f/86/32b10f91b4b975a37ac402b0f9fa016775088e0565c93602ba0b3c729ce8/tox-4.23.2.tar.gz", hash = 
"sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c", size = 189998 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/ee/6f9bf37f197578f98fb450f1aeebf4570f85b24b00d846bbde6e11489bd1/tox-4.20.0-py3-none-any.whl", hash = "sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34", size = 157087 }, + { url = "https://files.pythonhosted.org/packages/af/c0/124b73d01c120e917383bc6c53ebc34efdf7243faa9fca64d105c94cf2ab/tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38", size = 166758 }, ] [[package]] @@ -1784,22 +1826,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9a/bb/d43e5c75054e53efce310e79d63df0ac3f25e34c926be5dffb7d283fb2a8/typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1", size = 17605 }, ] -[[package]] -name = "types-pyyaml" -version = "6.0.12.20240917" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/7d/a95df0a11f95c8f48d7683f03e4aed1a2c0fc73e9de15cca4d38034bea1a/types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587", size = 12381 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/2c/c1d81d680997d24b0542aa336f0a65bd7835e5224b7670f33a7d617da379/types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570", size = 15264 }, -] - [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558 } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926 }, + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, ] [[package]] @@ -1819,11 +1852,11 @@ wheels = [ [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0c/39/64487bf07df2ed854cc06078c27c0d0abc59bd27b32232876e403c333a08/urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0", size = 305687 } +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b0/53/aa91e163dcfd1e5b82d8a890ecf13314e3e149c05270cc644581f77f17fd/urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07", size = 143835 }, + { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225 }, ] [[package]] @@ -1895,71 +1928,71 @@ wheels = [ [[package]] name = "yarl" -version = "1.15.2" +version = "1.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/e1/d5427a061819c9f885f58bb0467d02a523f1aec19f9e5f9c82ce950d90d3/yarl-1.15.2.tar.gz", hash = "sha256:a39c36f4218a5bb668b4f06874d676d35a035ee668e6e7e3538835c703634b84", size = 169318 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/59/3ae125c97a2a8571ea16fdf59fcbd288bc169e0005d1af9946a90ea831d9/yarl-1.15.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9fcda20b2de7042cc35cf911702fa3d8311bd40055a14446c1e62403684afdc5", size = 136492 }, - { url = "https://files.pythonhosted.org/packages/f9/2b/efa58f36b582db45b94c15e87803b775eb8a4ca0db558121a272e67f3564/yarl-1.15.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0545de8c688fbbf3088f9e8b801157923be4bf8e7b03e97c2ecd4dfa39e48e0e", size = 88614 }, - { url = "https://files.pythonhosted.org/packages/82/69/eb73c0453a2ff53194df485dc7427d54e6cb8d1180fcef53251a8e24d069/yarl-1.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbda058a9a68bec347962595f50546a8a4a34fd7b0654a7b9697917dc2bf810d", size = 86607 }, - { url = "https://files.pythonhosted.org/packages/48/4e/89beaee3a4da0d1c6af1176d738cff415ff2ad3737785ee25382409fe3e3/yarl-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ac2bc069f4a458634c26b101c2341b18da85cb96afe0015990507efec2e417", size = 334077 }, - { url = "https://files.pythonhosted.org/packages/da/e8/8fcaa7552093f94c3f327783e2171da0eaa71db0c267510898a575066b0f/yarl-1.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd126498171f752dd85737ab1544329a4520c53eed3997f9b08aefbafb1cc53b", size = 347365 }, - { url = "https://files.pythonhosted.org/packages/be/fa/dc2002f82a89feab13a783d3e6b915a3a2e0e83314d9e3f6d845ee31bfcc/yarl-1.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3db817b4e95eb05c362e3b45dafe7144b18603e1211f4a5b36eb9522ecc62bcf", size = 344823 }, - { url = "https://files.pythonhosted.org/packages/ae/c8/c4a00fe7f2aa6970c2651df332a14c88f8baaedb2e32d6c3b8c8a003ea74/yarl-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:076b1ed2ac819933895b1a000904f62d615fe4533a5cf3e052ff9a1da560575c", size = 337132 }, - { url = "https://files.pythonhosted.org/packages/07/bf/84125f85f44bf2af03f3cf64e87214b42cd59dcc8a04960d610a9825f4d4/yarl-1.15.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8cfd847e6b9ecf9f2f2531c8427035f291ec286c0a4944b0a9fce58c6446046", size = 326258 }, - { url = "https://files.pythonhosted.org/packages/00/19/73ad8122b2fa73fe22e32c24b82a6c053cf6c73e2f649b73f7ef97bee8d0/yarl-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:32b66be100ac5739065496c74c4b7f3015cef792c3174982809274d7e51b3e04", size = 
336212 }, - { url = "https://files.pythonhosted.org/packages/39/1d/2fa4337d11f6587e9b7565f84eba549f2921494bc8b10bfe811079acaa70/yarl-1.15.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:34a2d76a1984cac04ff8b1bfc939ec9dc0914821264d4a9c8fd0ed6aa8d4cfd2", size = 330397 }, - { url = "https://files.pythonhosted.org/packages/39/ab/dce75e06806bcb4305966471ead03ce639d8230f4f52c32bd614d820c044/yarl-1.15.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0afad2cd484908f472c8fe2e8ef499facee54a0a6978be0e0cff67b1254fd747", size = 334985 }, - { url = "https://files.pythonhosted.org/packages/c1/98/3f679149347a5e34c952bf8f71a387bc96b3488fae81399a49f8b1a01134/yarl-1.15.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c68e820879ff39992c7f148113b46efcd6ec765a4865581f2902b3c43a5f4bbb", size = 356033 }, - { url = "https://files.pythonhosted.org/packages/f7/8c/96546061c19852d0a4b1b07084a58c2e8911db6bcf7838972cff542e09fb/yarl-1.15.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:98f68df80ec6ca3015186b2677c208c096d646ef37bbf8b49764ab4a38183931", size = 357710 }, - { url = "https://files.pythonhosted.org/packages/01/45/ade6fb3daf689816ebaddb3175c962731edf300425c3254c559b6d0dcc27/yarl-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c56ec1eacd0a5d35b8a29f468659c47f4fe61b2cab948ca756c39b7617f0aa5", size = 345532 }, - { url = "https://files.pythonhosted.org/packages/e7/d7/8de800d3aecda0e64c43e8fc844f7effc8731a6099fa0c055738a2247504/yarl-1.15.2-cp311-cp311-win32.whl", hash = "sha256:eedc3f247ee7b3808ea07205f3e7d7879bc19ad3e6222195cd5fbf9988853e4d", size = 78250 }, - { url = "https://files.pythonhosted.org/packages/3a/6c/69058bbcfb0164f221aa30e0cd1a250f6babb01221e27c95058c51c498ca/yarl-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:0ccaa1bc98751fbfcf53dc8dfdb90d96e98838010fc254180dd6707a6e8bb179", size = 84492 }, - { url = "https://files.pythonhosted.org/packages/e0/d1/17ff90e7e5b1a0b4ddad847f9ec6a214b87905e3a59d01bff9207ce2253b/yarl-1.15.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82d5161e8cb8f36ec778fd7ac4d740415d84030f5b9ef8fe4da54784a1f46c94", size = 136721 }, - { url = "https://files.pythonhosted.org/packages/44/50/a64ca0577aeb9507f4b672f9c833d46cf8f1e042ce2e80c11753b936457d/yarl-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fa2bea05ff0a8fb4d8124498e00e02398f06d23cdadd0fe027d84a3f7afde31e", size = 88954 }, - { url = "https://files.pythonhosted.org/packages/c9/0a/a30d0b02046d4088c1fd32d85d025bd70ceb55f441213dee14d503694f41/yarl-1.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99e12d2bf587b44deb74e0d6170fec37adb489964dbca656ec41a7cd8f2ff178", size = 86692 }, - { url = "https://files.pythonhosted.org/packages/06/0b/7613decb8baa26cba840d7ea2074bd3c5e27684cbcb6d06e7840d6c5226c/yarl-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:243fbbbf003754fe41b5bdf10ce1e7f80bcc70732b5b54222c124d6b4c2ab31c", size = 325762 }, - { url = "https://files.pythonhosted.org/packages/97/f5/b8c389a58d1eb08f89341fc1bbcc23a0341f7372185a0a0704dbdadba53a/yarl-1.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:856b7f1a7b98a8c31823285786bd566cf06226ac4f38b3ef462f593c608a9bd6", size = 335037 }, - { url = "https://files.pythonhosted.org/packages/cb/f9/d89b93a7bb8b66e01bf722dcc6fec15e11946e649e71414fd532b05c4d5d/yarl-1.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:553dad9af802a9ad1a6525e7528152a015b85fb8dbf764ebfc755c695f488367", size = 334221 }, - { url = 
"https://files.pythonhosted.org/packages/10/77/1db077601998e0831a540a690dcb0f450c31f64c492e993e2eaadfbc7d31/yarl-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30c3ff305f6e06650a761c4393666f77384f1cc6c5c0251965d6bfa5fbc88f7f", size = 330167 }, - { url = "https://files.pythonhosted.org/packages/3b/c2/e5b7121662fd758656784fffcff2e411c593ec46dc9ec68e0859a2ffaee3/yarl-1.15.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:353665775be69bbfc6d54c8d134bfc533e332149faeddd631b0bc79df0897f46", size = 317472 }, - { url = "https://files.pythonhosted.org/packages/c6/f3/41e366c17e50782651b192ba06a71d53500cc351547816bf1928fb043c4f/yarl-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f4fe99ce44128c71233d0d72152db31ca119711dfc5f2c82385ad611d8d7f897", size = 330896 }, - { url = "https://files.pythonhosted.org/packages/79/a2/d72e501bc1e33e68a5a31f584fe4556ab71a50a27bfd607d023f097cc9bb/yarl-1.15.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9c1e3ff4b89cdd2e1a24c214f141e848b9e0451f08d7d4963cb4108d4d798f1f", size = 328787 }, - { url = "https://files.pythonhosted.org/packages/9d/ba/890f7e1ea17f3c247748548eee876528ceb939e44566fa7d53baee57e5aa/yarl-1.15.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:711bdfae4e699a6d4f371137cbe9e740dc958530cb920eb6f43ff9551e17cfbc", size = 332631 }, - { url = "https://files.pythonhosted.org/packages/48/c7/27b34206fd5dfe76b2caa08bf22f9212b2d665d5bb2df8a6dd3af498dcf4/yarl-1.15.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4388c72174868884f76affcdd3656544c426407e0043c89b684d22fb265e04a5", size = 344023 }, - { url = "https://files.pythonhosted.org/packages/88/e7/730b130f4f02bd8b00479baf9a57fdea1dc927436ed1d6ba08fa5c36c68e/yarl-1.15.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f0e1844ad47c7bd5d6fa784f1d4accc5f4168b48999303a868fe0f8597bde715", size = 352290 }, - { url = "https://files.pythonhosted.org/packages/84/9b/e8dda28f91a0af67098cddd455e6b540d3f682dda4c0de224215a57dee4a/yarl-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a5cafb02cf097a82d74403f7e0b6b9df3ffbfe8edf9415ea816314711764a27b", size = 343742 }, - { url = "https://files.pythonhosted.org/packages/66/47/b1c6bb85f2b66decbe189e27fcc956ab74670a068655df30ef9a2e15c379/yarl-1.15.2-cp312-cp312-win32.whl", hash = "sha256:156ececdf636143f508770bf8a3a0498de64da5abd890c7dbb42ca9e3b6c05b8", size = 78051 }, - { url = "https://files.pythonhosted.org/packages/7d/9e/1a897e5248ec53e96e9f15b3e6928efd5e75d322c6cf666f55c1c063e5c9/yarl-1.15.2-cp312-cp312-win_amd64.whl", hash = "sha256:435aca062444a7f0c884861d2e3ea79883bd1cd19d0a381928b69ae1b85bc51d", size = 84313 }, - { url = "https://files.pythonhosted.org/packages/46/ab/be3229898d7eb1149e6ba7fe44f873cf054d275a00b326f2a858c9ff7175/yarl-1.15.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:416f2e3beaeae81e2f7a45dc711258be5bdc79c940a9a270b266c0bec038fb84", size = 135006 }, - { url = "https://files.pythonhosted.org/packages/10/10/b91c186b1b0e63951f80481b3e6879bb9f7179d471fe7c4440c9e900e2a3/yarl-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:173563f3696124372831007e3d4b9821746964a95968628f7075d9231ac6bb33", size = 88121 }, - { url = "https://files.pythonhosted.org/packages/bf/1d/4ceaccf836b9591abfde775e84249b847ac4c6c14ee2dd8d15b5b3cede44/yarl-1.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ce2e0f6123a60bd1a7f5ae3b2c49b240c12c132847f17aa990b841a417598a2", size = 85967 }, - { url = 
"https://files.pythonhosted.org/packages/93/bd/c924f22bdb2c5d0ca03a9e64ecc5e041aace138c2a91afff7e2f01edc3a1/yarl-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaea112aed589131f73d50d570a6864728bd7c0c66ef6c9154ed7b59f24da611", size = 325615 }, - { url = "https://files.pythonhosted.org/packages/59/a5/6226accd5c01cafd57af0d249c7cf9dd12569cd9c78fbd93e8198e7a9d84/yarl-1.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4ca3b9f370f218cc2a0309542cab8d0acdfd66667e7c37d04d617012485f904", size = 334945 }, - { url = "https://files.pythonhosted.org/packages/4c/c1/cc6ccdd2bcd0ff7291602d5831754595260f8d2754642dfd34fef1791059/yarl-1.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23ec1d3c31882b2a8a69c801ef58ebf7bae2553211ebbddf04235be275a38548", size = 336701 }, - { url = "https://files.pythonhosted.org/packages/ef/ff/39a767ee249444e4b26ea998a526838238f8994c8f274befc1f94dacfb43/yarl-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75119badf45f7183e10e348edff5a76a94dc19ba9287d94001ff05e81475967b", size = 330977 }, - { url = "https://files.pythonhosted.org/packages/dd/ba/b1fed73f9d39e3e7be8f6786be5a2ab4399c21504c9168c3cadf6e441c2e/yarl-1.15.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e6fdc976ec966b99e4daa3812fac0274cc28cd2b24b0d92462e2e5ef90d368", size = 317402 }, - { url = "https://files.pythonhosted.org/packages/82/e8/03e3ebb7f558374f29c04868b20ca484d7997f80a0a191490790a8c28058/yarl-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8657d3f37f781d987037f9cc20bbc8b40425fa14380c87da0cb8dfce7c92d0fb", size = 331776 }, - { url = "https://files.pythonhosted.org/packages/1f/83/90b0f4fd1ecf2602ba4ac50ad0bbc463122208f52dd13f152bbc0d8417dd/yarl-1.15.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:93bed8a8084544c6efe8856c362af08a23e959340c87a95687fdbe9c9f280c8b", size = 331585 }, - { url = "https://files.pythonhosted.org/packages/c7/f6/1ed7e7f270ae5f9f1174c1f8597b29658f552fee101c26de8b2eb4ca147a/yarl-1.15.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:69d5856d526802cbda768d3e6246cd0d77450fa2a4bc2ea0ea14f0d972c2894b", size = 336395 }, - { url = "https://files.pythonhosted.org/packages/e0/3a/4354ed8812909d9ec54a92716a53259b09e6b664209231f2ec5e75f4820d/yarl-1.15.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ccad2800dfdff34392448c4bf834be124f10a5bc102f254521d931c1c53c455a", size = 342810 }, - { url = "https://files.pythonhosted.org/packages/de/cc/39e55e16b1415a87f6d300064965d6cfb2ac8571e11339ccb7dada2444d9/yarl-1.15.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a880372e2e5dbb9258a4e8ff43f13888039abb9dd6d515f28611c54361bc5644", size = 351441 }, - { url = "https://files.pythonhosted.org/packages/fb/19/5cd4757079dc9d9f3de3e3831719b695f709a8ce029e70b33350c9d082a7/yarl-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c998d0558805860503bc3a595994895ca0f7835e00668dadc673bbf7f5fbfcbe", size = 345875 }, - { url = "https://files.pythonhosted.org/packages/83/a0/ef09b54634f73417f1ea4a746456a4372c1b044f07b26e16fa241bd2d94e/yarl-1.15.2-cp313-cp313-win32.whl", hash = "sha256:533a28754e7f7439f217550a497bb026c54072dbe16402b183fdbca2431935a9", size = 302609 }, - { url = "https://files.pythonhosted.org/packages/20/9f/f39c37c17929d3975da84c737b96b606b68c495cc4ee86408f10523a1635/yarl-1.15.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:5838f2b79dc8f96fdc44077c9e4e2e33d7089b10788464609df788eb97d03aad", size = 308252 }, - { url = "https://files.pythonhosted.org/packages/46/cf/a28c494decc9c8776b0d7b729c68d26fdafefcedd8d2eab5d9cd767376b2/yarl-1.15.2-py3-none-any.whl", hash = "sha256:0d3105efab7c5c091609abacad33afff33bdff0035bece164c98bcf5a85ef90a", size = 38891 }, +sdist = { url = "https://files.pythonhosted.org/packages/54/9c/9c0a9bfa683fc1be7fdcd9687635151544d992cccd48892dc5e0a5885a29/yarl-1.17.1.tar.gz", hash = "sha256:067a63fcfda82da6b198fa73079b1ca40b7c9b7994995b6ee38acda728b64d47", size = 178163 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/0f/ce6a2c8aab9946446fb27f1e28f0fd89ce84ae913ab18a92d18078a1c7ed/yarl-1.17.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cbad927ea8ed814622305d842c93412cb47bd39a496ed0f96bfd42b922b4a217", size = 140727 }, + { url = "https://files.pythonhosted.org/packages/9d/df/204f7a502bdc3973cd9fc29e7dfad18ae48b3acafdaaf1ae07c0f41025aa/yarl-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fca4b4307ebe9c3ec77a084da3a9d1999d164693d16492ca2b64594340999988", size = 93560 }, + { url = "https://files.pythonhosted.org/packages/a2/e1/f4d522ae0560c91a4ea31113a50f00f85083be885e1092fc6e74eb43cb1d/yarl-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff5c6771c7e3511a06555afa317879b7db8d640137ba55d6ab0d0c50425cab75", size = 91497 }, + { url = "https://files.pythonhosted.org/packages/f1/82/783d97bf4a226f1a2e59b1966f2752244c2bf4dc89bc36f61d597b8e34e5/yarl-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b29beab10211a746f9846baa39275e80034e065460d99eb51e45c9a9495bcca", size = 339446 }, + { url = "https://files.pythonhosted.org/packages/e5/ff/615600647048d81289c80907165de713fbc566d1e024789863a2f6563ba3/yarl-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a52a1ffdd824fb1835272e125385c32fd8b17fbdefeedcb4d543cc23b332d74", size = 354616 }, + { url = "https://files.pythonhosted.org/packages/a5/04/bfb7adb452bd19dfe0c35354ffce8ebc3086e028e5f8270e409d17da5466/yarl-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58c8e9620eb82a189c6c40cb6b59b4e35b2ee68b1f2afa6597732a2b467d7e8f", size = 351801 }, + { url = "https://files.pythonhosted.org/packages/10/e0/efe21edacdc4a638ce911f8cabf1c77cac3f60e9819ba7d891b9ceb6e1d4/yarl-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d216e5d9b8749563c7f2c6f7a0831057ec844c68b4c11cb10fc62d4fd373c26d", size = 343381 }, + { url = "https://files.pythonhosted.org/packages/63/f9/7bc7e69857d6fc3920ecd173592f921d5701f4a0dd3f2ae293b386cfa3bf/yarl-1.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:881764d610e3269964fc4bb3c19bb6fce55422828e152b885609ec176b41cf11", size = 337093 }, + { url = "https://files.pythonhosted.org/packages/93/52/99da61947466275ff17d7bc04b0ac31dfb7ec699bd8d8985dffc34c3a913/yarl-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8c79e9d7e3d8a32d4824250a9c6401194fb4c2ad9a0cec8f6a96e09a582c2cc0", size = 346619 }, + { url = "https://files.pythonhosted.org/packages/91/8a/8aaad86a35a16e485ba0e5de0d2ae55bf8dd0c9f1cccac12be4c91366b1d/yarl-1.17.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:299f11b44d8d3a588234adbe01112126010bd96d9139c3ba7b3badd9829261c3", size = 344347 }, + { url = 
"https://files.pythonhosted.org/packages/af/b6/97f29f626b4a1768ffc4b9b489533612cfcb8905c90f745aade7b2eaf75e/yarl-1.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc7d768260f4ba4ea01741c1b5fe3d3a6c70eb91c87f4c8761bbcce5181beafe", size = 350316 }, + { url = "https://files.pythonhosted.org/packages/d7/98/8e0e8b812479569bdc34d66dd3e2471176ca33be4ff5c272a01333c4b269/yarl-1.17.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:de599af166970d6a61accde358ec9ded821234cbbc8c6413acfec06056b8e860", size = 361336 }, + { url = "https://files.pythonhosted.org/packages/9e/d3/d1507efa0a85c25285f8eb51df9afa1ba1b6e446dda781d074d775b6a9af/yarl-1.17.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2b24ec55fad43e476905eceaf14f41f6478780b870eda5d08b4d6de9a60b65b4", size = 365350 }, + { url = "https://files.pythonhosted.org/packages/22/ba/ee7f1830449c96bae6f33210b7d89e8aaf3079fbdaf78ac398e50a9da404/yarl-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9fb815155aac6bfa8d86184079652c9715c812d506b22cfa369196ef4e99d1b4", size = 357689 }, + { url = "https://files.pythonhosted.org/packages/a0/85/321c563dc5afe1661108831b965c512d185c61785400f5606006507d2e18/yarl-1.17.1-cp311-cp311-win32.whl", hash = "sha256:7615058aabad54416ddac99ade09a5510cf77039a3b903e94e8922f25ed203d7", size = 83635 }, + { url = "https://files.pythonhosted.org/packages/bc/da/543a32c00860588ff1235315b68f858cea30769099c32cd22b7bb266411b/yarl-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:14bc88baa44e1f84164a392827b5defb4fa8e56b93fecac3d15315e7c8e5d8b3", size = 90218 }, + { url = "https://files.pythonhosted.org/packages/5d/af/e25615c7920396219b943b9ff8b34636ae3e1ad30777649371317d7f05f8/yarl-1.17.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:327828786da2006085a4d1feb2594de6f6d26f8af48b81eb1ae950c788d97f61", size = 141839 }, + { url = "https://files.pythonhosted.org/packages/83/5e/363d9de3495c7c66592523f05d21576a811015579e0c87dd38c7b5788afd/yarl-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cc353841428d56b683a123a813e6a686e07026d6b1c5757970a877195f880c2d", size = 94125 }, + { url = "https://files.pythonhosted.org/packages/e3/a2/b65447626227ebe36f18f63ac551790068bf42c69bb22dfa3ae986170728/yarl-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c73df5b6e8fabe2ddb74876fb82d9dd44cbace0ca12e8861ce9155ad3c886139", size = 92048 }, + { url = "https://files.pythonhosted.org/packages/a1/f5/2ef86458446f85cde10582054fd5113495ef8ce8477da35aaaf26d2970ef/yarl-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bdff5e0995522706c53078f531fb586f56de9c4c81c243865dd5c66c132c3b5", size = 331472 }, + { url = "https://files.pythonhosted.org/packages/f3/6b/1ba79758ba352cdf2ad4c20cab1b982dd369aa595bb0d7601fc89bf82bee/yarl-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06157fb3c58f2736a5e47c8fcbe1afc8b5de6fb28b14d25574af9e62150fcaac", size = 341260 }, + { url = "https://files.pythonhosted.org/packages/2d/41/4e07c2afca3f9ed3da5b0e38d43d0280d9b624a3d5c478c425e5ce17775c/yarl-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1654ec814b18be1af2c857aa9000de7a601400bd4c9ca24629b18486c2e35463", size = 340882 }, + { url = "https://files.pythonhosted.org/packages/c3/c0/cd8e94618983c1b811af082e1a7ad7764edb3a6af2bc6b468e0e686238ba/yarl-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6595c852ca544aaeeb32d357e62c9c780eac69dcd34e40cae7b55bc4fb1147", size = 336648 }, + { url = 
"https://files.pythonhosted.org/packages/ac/fc/73ec4340d391ffbb8f34eb4c55429784ec9f5bd37973ce86d52d67135418/yarl-1.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:459e81c2fb920b5f5df744262d1498ec2c8081acdcfe18181da44c50f51312f7", size = 325019 }, + { url = "https://files.pythonhosted.org/packages/57/48/da3ebf418fc239d0a156b3bdec6b17a5446f8d2dea752299c6e47b143a85/yarl-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7e48cdb8226644e2fbd0bdb0a0f87906a3db07087f4de77a1b1b1ccfd9e93685", size = 342841 }, + { url = "https://files.pythonhosted.org/packages/5d/79/107272745a470a8167924e353a5312eb52b5a9bb58e22686adc46c94f7ec/yarl-1.17.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d9b6b28a57feb51605d6ae5e61a9044a31742db557a3b851a74c13bc61de5172", size = 341433 }, + { url = "https://files.pythonhosted.org/packages/30/9c/6459668b3b8dcc11cd061fc53e12737e740fb6b1575b49c84cbffb387b3a/yarl-1.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e594b22688d5747b06e957f1ef822060cb5cb35b493066e33ceac0cf882188b7", size = 344927 }, + { url = "https://files.pythonhosted.org/packages/c5/0b/93a17ed733aca8164fc3a01cb7d47b3f08854ce4f957cce67a6afdb388a0/yarl-1.17.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5f236cb5999ccd23a0ab1bd219cfe0ee3e1c1b65aaf6dd3320e972f7ec3a39da", size = 355732 }, + { url = "https://files.pythonhosted.org/packages/9a/63/ead2ed6aec3c59397e135cadc66572330325a0c24cd353cd5c94f5e63463/yarl-1.17.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a2a64e62c7a0edd07c1c917b0586655f3362d2c2d37d474db1a509efb96fea1c", size = 362123 }, + { url = "https://files.pythonhosted.org/packages/89/bf/f6b75b4c2fcf0e7bb56edc0ed74e33f37fac45dc40e5a52a3be66b02587a/yarl-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d0eea830b591dbc68e030c86a9569826145df485b2b4554874b07fea1275a199", size = 356355 }, + { url = "https://files.pythonhosted.org/packages/45/1f/50a0257cd07eef65c8c65ad6a21f5fb230012d659e021aeb6ac8a7897bf6/yarl-1.17.1-cp312-cp312-win32.whl", hash = "sha256:46ddf6e0b975cd680eb83318aa1d321cb2bf8d288d50f1754526230fcf59ba96", size = 83279 }, + { url = "https://files.pythonhosted.org/packages/bc/82/fafb2c1268d63d54ec08b3a254fbe51f4ef098211501df646026717abee3/yarl-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:117ed8b3732528a1e41af3aa6d4e08483c2f0f2e3d3d7dca7cf538b3516d93df", size = 89590 }, + { url = "https://files.pythonhosted.org/packages/06/1e/5a93e3743c20eefbc68bd89334d9c9f04f3f2334380f7bbf5e950f29511b/yarl-1.17.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5d1d42556b063d579cae59e37a38c61f4402b47d70c29f0ef15cee1acaa64488", size = 139974 }, + { url = "https://files.pythonhosted.org/packages/a1/be/4e0f6919013c7c5eaea5c31811c551ccd599d2fc80aa3dd6962f1bbdcddd/yarl-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c0167540094838ee9093ef6cc2c69d0074bbf84a432b4995835e8e5a0d984374", size = 93364 }, + { url = "https://files.pythonhosted.org/packages/73/f0/650f994bc491d0cb85df8bb45392780b90eab1e175f103a5edc61445ff67/yarl-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2f0a6423295a0d282d00e8701fe763eeefba8037e984ad5de44aa349002562ac", size = 91177 }, + { url = "https://files.pythonhosted.org/packages/f3/e8/9945ed555d14b43ede3ae8b1bd73e31068a694cad2b9d3cad0a28486c2eb/yarl-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b078134f48552c4d9527db2f7da0b5359abd49393cdf9794017baec7506170", size = 333086 }, + { url = 
"https://files.pythonhosted.org/packages/a6/c0/7d167e48e14d26639ca066825af8da7df1d2fcdba827e3fd6341aaf22a3b/yarl-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d401f07261dc5aa36c2e4efc308548f6ae943bfff20fcadb0a07517a26b196d8", size = 343661 }, + { url = "https://files.pythonhosted.org/packages/fa/81/80a266517531d4e3553aecd141800dbf48d02e23ebd52909e63598a80134/yarl-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5f1ac7359e17efe0b6e5fec21de34145caef22b260e978336f325d5c84e6938", size = 345196 }, + { url = "https://files.pythonhosted.org/packages/b0/77/6adc482ba7f2dc6c0d9b3b492e7cd100edfac4cfc3849c7ffa26fd7beb1a/yarl-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f63d176a81555984e91f2c84c2a574a61cab7111cc907e176f0f01538e9ff6e", size = 338743 }, + { url = "https://files.pythonhosted.org/packages/6d/cc/f0c4c0b92ff3ada517ffde2b127406c001504b225692216d969879ada89a/yarl-1.17.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e275792097c9f7e80741c36de3b61917aebecc08a67ae62899b074566ff8556", size = 326719 }, + { url = "https://files.pythonhosted.org/packages/18/3b/7bfc80d3376b5fa162189993a87a5a6a58057f88315bd0ea00610055b57a/yarl-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:81713b70bea5c1386dc2f32a8f0dab4148a2928c7495c808c541ee0aae614d67", size = 345826 }, + { url = "https://files.pythonhosted.org/packages/2e/66/cf0b0338107a5c370205c1a572432af08f36ca12ecce127f5b558398b4fd/yarl-1.17.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:aa46dce75078fceaf7cecac5817422febb4355fbdda440db55206e3bd288cfb8", size = 340335 }, + { url = "https://files.pythonhosted.org/packages/2f/52/b084b0eec0fd4d2490e1d33ace3320fad704c5f1f3deaa709f929d2d87fc/yarl-1.17.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1ce36ded585f45b1e9bb36d0ae94765c6608b43bd2e7f5f88079f7a85c61a4d3", size = 345301 }, + { url = "https://files.pythonhosted.org/packages/ef/38/9e2036d948efd3bafcdb4976cb212166fded76615f0dfc6c1492c4ce4784/yarl-1.17.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2d374d70fdc36f5863b84e54775452f68639bc862918602d028f89310a034ab0", size = 354205 }, + { url = "https://files.pythonhosted.org/packages/81/c1/13dfe1e70b86811733316221c696580725ceb1c46d4e4db852807e134310/yarl-1.17.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2d9f0606baaec5dd54cb99667fcf85183a7477f3766fbddbe3f385e7fc253299", size = 360501 }, + { url = "https://files.pythonhosted.org/packages/91/87/756e05c74cd8bf9e71537df4a2cae7e8211a9ebe0d2350a3e26949e1e41c/yarl-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b0341e6d9a0c0e3cdc65857ef518bb05b410dbd70d749a0d33ac0f39e81a4258", size = 359452 }, + { url = "https://files.pythonhosted.org/packages/06/b2/b2bb09c1e6d59e1c9b1b36a86caa473e22c3dbf26d1032c030e9bfb554dc/yarl-1.17.1-cp313-cp313-win32.whl", hash = "sha256:2e7ba4c9377e48fb7b20dedbd473cbcbc13e72e1826917c185157a137dac9df2", size = 308904 }, + { url = "https://files.pythonhosted.org/packages/f3/27/f084d9a5668853c1f3b246620269b14ee871ef3c3cc4f3a1dd53645b68ec/yarl-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:949681f68e0e3c25377462be4b658500e85ca24323d9619fdc41f68d46a1ffda", size = 314637 }, + { url = "https://files.pythonhosted.org/packages/52/ad/1fe7ff5f3e8869d4c5070f47b96bac2b4d15e67c100a8278d8e7876329fc/yarl-1.17.1-py3-none-any.whl", hash = "sha256:f1790a4b1e8e8e028c391175433b9c8122c39b46e1663228158e61e6f915bf06", size = 44352 }, ] 
[[package]] name = "zipp" -version = "3.20.2" +version = "3.21.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/bf/5c0000c44ebc80123ecbdddba1f5dcd94a5ada602a9c225d84b5aaa55e86/zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29", size = 24199 } +sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/8b/5ba542fa83c90e09eac972fc9baca7a88e7e7ca4b221a89251954019308b/zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", size = 9200 }, + { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 }, ] From f67ff0486e410d45a29eb848caa9f62ba7bdc0a9 Mon Sep 17 00:00:00 2001 From: James Estevez Date: Wed, 13 Nov 2024 14:17:45 -0800 Subject: [PATCH 35/65] Clean up final sfdx references --- cumulusci/tasks/salesforce/tests/test_check_components.py | 2 +- cumulusci/tasks/tests/test_sfdx.py | 2 +- cumulusci/tasks/vlocity/tests/test_vlocity.py | 2 +- docs/cookbook.md | 6 +++--- docs/unlocked-package.md | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/cumulusci/tasks/salesforce/tests/test_check_components.py b/cumulusci/tasks/salesforce/tests/test_check_components.py index 330ea5c194..549275be99 100644 --- a/cumulusci/tasks/salesforce/tests/test_check_components.py +++ b/cumulusci/tasks/salesforce/tests/test_check_components.py @@ -99,7 +99,7 @@ def test_collect_components_from_paths( assert components is not None assert "ApexClass" not in components sfdx.assert_called_once_with( - "force:source:convert", + "project convert source", args=["-d", ANY, "-r", "force-app/main/default"], capture_output=True, check_return=True, diff --git a/cumulusci/tasks/tests/test_sfdx.py b/cumulusci/tasks/tests/test_sfdx.py index 513793cc08..a2b5a46a81 100644 --- a/cumulusci/tasks/tests/test_sfdx.py +++ b/cumulusci/tasks/tests/test_sfdx.py @@ -52,7 +52,7 @@ def test_base_task(self): def test_keychain_org_creds(self): """Keychain org creds are passed by env var""" - self.task_config.config["options"] = {"command": "force:org --help"} + self.task_config.config["options"] = {"command": "org --help"} access_token = "00d123" org_config = OrgConfig( { diff --git a/cumulusci/tasks/vlocity/tests/test_vlocity.py b/cumulusci/tasks/vlocity/tests/test_vlocity.py index e30be94ab2..03c0fbe3b4 100644 --- a/cumulusci/tasks/vlocity/tests/test_vlocity.py +++ b/cumulusci/tasks/vlocity/tests/test_vlocity.py @@ -247,7 +247,7 @@ def test_deploy_omni_studio_site_settings( # The frequent error is: # # "name": "NoOrgFound", -# "action": "Run the \"sfdx force:auth\" commands with --target-org to connect to an org and set it as your default org.\nRun \"org create scratch\" with --target-org to create a scratch org and set it as your default org.\nRun \"sf config set target-org=\" to set your default username." 
+# "action": "Run the \"sf auth\" commands with --target-org to connect to an org and set it as your default org.\nRun \"org create scratch\" with --target-org to create a scratch org and set it as your default org.\nRun \"sf config set target-org=\" to set your default username." # } diff --git a/docs/cookbook.md b/docs/cookbook.md index 3b175a169e..3ed54f35e1 100644 --- a/docs/cookbook.md +++ b/docs/cookbook.md @@ -43,7 +43,7 @@ run_custom_command: The `dx` task lets you run an arbitrary `sfdx` command. You can perform this with `cci` on a terminal: - $ cci task run dx -o command 'force:api:limits:display' + $ cci task run dx -o command 'limits api display' Or you can utilize the same `class_path` as the `dx` task and make a custom task that can be executed by itself or as a step in a flow. @@ -54,7 +54,7 @@ dx_limits: class_path: cumulusci.tasks.sfdx.SFDXBaseTask group: dx options: - command: sfdx force:limits:api:display + command: sf limits api display ``` In this case, we actually utilize `SFDXBaseTask`, if you would like to @@ -64,7 +64,7 @@ instead. ### Custom Deploy It is often useful to be able to define multiple custom deployment tasks -that deployg a specific subset of your projects metadata. This is +that deploy a specific subset of your projects metadata. This is particularly true when working with [unpackaged Metadata](unpackaged). Here is a custom task that is defined to only deploy only the metadata diff --git a/docs/unlocked-package.md b/docs/unlocked-package.md index b4e770131f..553478e087 100644 --- a/docs/unlocked-package.md +++ b/docs/unlocked-package.md @@ -168,7 +168,7 @@ the GitHub release operations: $ cci task run promote_package_version --version_id 04t000000000000 --promote_dependencies True ``` -Alternatively, you can use the `sfdx force:package:version:promote` +Alternatively, you can use the `sf package version promote` command to promote a 2GP package. ### Promote Dependencies From 11a6a53a69592f71e59e7ad0fe868c57693217fe Mon Sep 17 00:00:00 2001 From: James Estevez Date: Wed, 13 Nov 2024 15:00:28 -0800 Subject: [PATCH 36/65] prettier --- .github/workflows/feature_test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/feature_test.yml b/.github/workflows/feature_test.yml index 31ea13dce8..8ab1f3bedf 100644 --- a/.github/workflows/feature_test.yml +++ b/.github/workflows/feature_test.yml @@ -36,8 +36,8 @@ jobs: run: uv sync --group docs - name: Build Docs run: | - cd docs - uv run sphinx-build -b html . ./_build + cd docs + uv run sphinx-build -b html . 
./_build unit_tests: name: "Unit tests: ${{ matrix.os }}-${{ matrix.python-version }}" From d89b6214dd8f62f4ce947f793c93fdbe2ed98ff4 Mon Sep 17 00:00:00 2001 From: James Estevez Date: Wed, 13 Nov 2024 15:09:32 -0800 Subject: [PATCH 37/65] rm coverage --- .github/workflows/feature_test.yml | 2 +- .github/workflows/release_test.yml | 15 ++++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.github/workflows/feature_test.yml b/.github/workflows/feature_test.yml index 8ab1f3bedf..9433041f85 100644 --- a/.github/workflows/feature_test.yml +++ b/.github/workflows/feature_test.yml @@ -95,7 +95,7 @@ jobs: SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }} - name: Run robot tests run: | - uv run coverage run --append $(which cci) task run robot \ + uv run cci task run robot \ --org dev \ -o name "CumulusCI" \ -o suites cumulusci/robotframework/tests \ diff --git a/.github/workflows/release_test.yml b/.github/workflows/release_test.yml index e6d8b66ba9..2b988f4578 100644 --- a/.github/workflows/release_test.yml +++ b/.github/workflows/release_test.yml @@ -77,8 +77,13 @@ jobs: python-version: 3.11 cache: pip cache-dependency-path: "pyproject.toml" + - name: Set up uv + uses: SFDO-Tooling/setup-uv@main + with: + version: "0.5.0" + enable-cache: true - name: Install Python dependencies - run: pip install .[test] + run: uv sync - name: Install sfdx run: | mkdir sfdx @@ -95,17 +100,17 @@ jobs: - name: Run ci_feature flow run: | cd CumulusCI-Test - coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run ci_feature --org scratch --delete-org + uv run cci flow run ci_feature --org scratch --delete-org - name: Run ci_beta flow run: | cd CumulusCI-Test - coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run ci_beta --org scratch --delete-org + uv run cci flow run ci_beta --org scratch --delete-org - name: Run ci_master flow run: | cd CumulusCI-Test - coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run ci_master --org scratch --delete-org + uv run cci flow run ci_master --org scratch --delete-org - name: Run release_beta flow run: | export SFDX_HUB_KEY="$(echo $SFDX_HUB_KEY_BASE64 | base64 --decode)" cd CumulusCI-Test - coverage run --append --rcfile=../pyproject.toml --source=../cumulusci $(which cci) flow run release_beta --org packaging + uv run cci flow run release_beta --org packaging From 5edef21388a580ce7e0d4918f1fc610e9c31ffe0 Mon Sep 17 00:00:00 2001 From: Paul Prescod Date: Thu, 14 Nov 2024 20:44:12 -0800 Subject: [PATCH 38/65] Cleanup some warnings (#3491) Cleanup a few warnings. I'll use comments to explain. 
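Much of the warning noise cleaned up in this patch comes from the `responses` test library: the deprecated `match_querystring=True` flag and the old `responses.json_params_matcher` alias are replaced with explicit matchers imported from `responses.matchers`. A minimal sketch of that migration — the endpoint, query, and payload below are hypothetical, not CumulusCI's:

```python
# Sketch only: URL, query, and payload are made up for illustration.
import requests
import responses
from responses.matchers import json_params_matcher, query_string_matcher


@responses.activate
def demo_matchers():
    # Before: responses.add(responses.GET, url + "?q=...", match_querystring=True, ...)
    # After: register the bare URL and match the query string explicitly.
    responses.add(
        responses.GET,
        "https://example.invalid/services/data/tooling/query/",
        match=[query_string_matcher("q=SELECT+Id+FROM+ApexClass")],
        json={"done": True, "totalSize": 0, "records": []},
    )
    # json_params_matcher compares the request body as parsed JSON,
    # so key order and whitespace no longer matter.
    responses.add(
        responses.PATCH,
        "https://example.invalid/services/data/tooling/sobjects/MLPredictionDefinition/001",
        match=[json_params_matcher({"Metadata": {"status": "Enabled"}})],
    )
    requests.get(
        "https://example.invalid/services/data/tooling/query/",
        params={"q": "SELECT Id FROM ApexClass"},
    )
    requests.patch(
        "https://example.invalid/services/data/tooling/sobjects/MLPredictionDefinition/001",
        json={"Metadata": {"status": "Enabled"}},
    )


demo_matchers()
```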
--------- Co-authored-by: Paul Prescod Co-authored-by: James Estevez --- .prettierignore | 1 + cumulusci/core/config/org_config.py | 21 ++++-- cumulusci/core/config/tests/test_config.py | 2 +- .../dependencies/tests/test_dependencies.py | 5 ++ cumulusci/oauth/tests/test_client.py | 23 ++++-- cumulusci/tasks/apex/tests/test_apex_tasks.py | 75 +++++++++++-------- .../tasks/bulkdata/tests/test_snowfakery.py | 4 +- cumulusci/tasks/github/tests/test_release.py | 3 +- cumulusci/tasks/release_notes/parser.py | 2 +- .../tasks/salesforce/BaseRetrieveMetadata.py | 3 +- .../tasks/salesforce/RetrievePackaged.py | 4 +- .../tests/test_enable_prediction.py | 13 ++-- .../salesforce/users/tests/test_permsets.py | 17 +++-- .../tests/test_create_package_version.py | 2 +- cumulusci/utils/ziputils.py | 2 +- 15 files changed, 111 insertions(+), 66 deletions(-) diff --git a/.prettierignore b/.prettierignore index e69de29bb2..329331137b 100644 --- a/.prettierignore +++ b/.prettierignore @@ -0,0 +1 @@ +Test*.yaml \ No newline at end of file diff --git a/cumulusci/core/config/org_config.py b/cumulusci/core/config/org_config.py index b3f191d172..e179fbbe3b 100644 --- a/cumulusci/core/config/org_config.py +++ b/cumulusci/core/config/org_config.py @@ -3,6 +3,7 @@ from collections import defaultdict, namedtuple from contextlib import contextmanager from datetime import date, datetime +from typing import Optional from urllib.parse import urlparse import requests @@ -47,14 +48,12 @@ class OrgConfig(BaseConfig): is_sandbox: bool namespace: str namespaced: bool - org_id: str org_type: str password: str scratch: bool scratch_org_type: str set_password: bool sfdx_alias: str - username: str userinfo: str id: str active: bool @@ -63,8 +62,9 @@ class OrgConfig(BaseConfig): refresh_token: str client_secret: str connected_app: str + serialization_format: str - createable: bool = None + createable: Optional[bool] = None # make sure it can be mocked for tests OAuth2Client = OAuth2Client @@ -204,7 +204,15 @@ def user_id(self): @property def org_id(self): - return self.id.split("/")[-2] + try: + if org_id := self.config.get("org_id"): + return org_id + elif hasattr(self, "id") and self.id: + return self.id.split("/")[-2] + else: + return None + except Exception as e: # pragma: no cover + assert e is None, e @property def username(self): @@ -254,7 +262,7 @@ def populate_expiration_date(self): @property def organization_sobject(self): """Cached copy of Organization sObject. Does not perform API call.""" - return self._org_sobject + return getattr(self, "_org_sobject", None) def _fetch_community_info(self): """Use the API to re-fetch information about communities""" @@ -317,7 +325,8 @@ def installed_packages(self): To check if a required package is present, call `has_minimum_package_version()` with either the namespace or 033 Id of the desired package and its version, in 1.2.3 format. - Beta version of a package are represented as "1.2.3b5", where 5 is the build number.""" + Beta version of a package are represented as "1.2.3b5", where 5 is the build number. 
+ """ if self._installed_packages is None: isp_result = self.salesforce_client.restful( "tooling/query/?q=SELECT SubscriberPackage.Id, SubscriberPackage.NamespacePrefix, " diff --git a/cumulusci/core/config/tests/test_config.py b/cumulusci/core/config/tests/test_config.py index 9656964daa..77973526dd 100644 --- a/cumulusci/core/config/tests/test_config.py +++ b/cumulusci/core/config/tests/test_config.py @@ -66,7 +66,7 @@ def test_getattr_toplevel_key_missing(self): assert config.foo is None with mock.patch( "cumulusci.core.config.base_config.STRICT_GETATTR", True - ), pytest.raises(AssertionError): + ), pytest.deprecated_call(), pytest.raises(AssertionError): assert config.foo is None def test_getattr_child_key(self): diff --git a/cumulusci/core/dependencies/tests/test_dependencies.py b/cumulusci/core/dependencies/tests/test_dependencies.py index 3c9a2b8f0b..e0c757c7aa 100644 --- a/cumulusci/core/dependencies/tests/test_dependencies.py +++ b/cumulusci/core/dependencies/tests/test_dependencies.py @@ -645,6 +645,7 @@ def test_install(self, api_deploy_mock, zip_builder_mock, download_mock): assert mock_task.project_config == context api_deploy_mock.return_value.assert_called_once() + zf.close() def test_get_unmanaged(self): org = mock.Mock() @@ -733,6 +734,7 @@ def test_install(self, api_deploy_mock, zip_builder_mock, download_mock): assert mock_task.project_config == context api_deploy_mock.return_value.assert_called_once() + zf.close() def test_get_unmanaged(self): org = mock.Mock() @@ -793,6 +795,7 @@ def test_get_metadata_package_zip_builder__mdapi_root( }, context=mock.ANY, ) + zf.close() @mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder") @mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip") @@ -827,6 +830,7 @@ def test_get_metadata_package_zip_builder__mdapi_subfolder( }, context=mock.ANY, ) + zf.close() @mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder") @mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip") @@ -866,6 +870,7 @@ def test_get_metadata_package_zip_builder__sfdx( capture_output=True, check_return=True, ) + zf.close() class TestParseDependency: diff --git a/cumulusci/oauth/tests/test_client.py b/cumulusci/oauth/tests/test_client.py index 9f0e939cb2..430b18e9ab 100644 --- a/cumulusci/oauth/tests/test_client.py +++ b/cumulusci/oauth/tests/test_client.py @@ -13,7 +13,10 @@ import responses from requests.models import Response -from cumulusci.core.exceptions import SalesforceCredentialsException +from cumulusci.core.exceptions import ( + CumulusCIUsageError, + SalesforceCredentialsException, +) from cumulusci.core.keychain.base_project_keychain import DEFAULT_CONNECTED_APP_PORT from cumulusci.oauth.client import ( PORT_IN_USE_ERR, @@ -72,9 +75,17 @@ def http_client(client_config): @contextmanager @mock.patch("time.sleep", time.sleep) # undo mock from conftest -def httpd_thread(oauth_client): +def httpd_thread(oauth_client, expected_error=None): # call OAuth object on another thread - this spawns local httpd - thread = threading.Thread(target=oauth_client.auth_code_flow) + + def run_code_and_check_exception(): + if expected_error: + with pytest.raises(expected_error): + oauth_client.auth_code_flow() + else: + oauth_client.auth_code_flow() + + thread = threading.Thread(target=run_code_and_check_exception) thread.start() while thread.is_alive(): if oauth_client.httpd: @@ -192,7 +203,7 @@ def test_oauth_flow_error_from_auth(self, client): ) # call OAuth object on another thread 
- this spawns local httpd - with httpd_thread(client): + with httpd_thread(client, OAuth2Error): # simulate callback from browser with pytest.raises(urllib.error.HTTPError): urllib.request.urlopen( @@ -204,7 +215,7 @@ def test_oauth_flow_error_from_auth(self, client): sys.platform.startswith("win"), reason="setup differs from windows" ) def test_create_httpd__port_already_in_use(self, client): - with httpd_thread(client): + with httpd_thread(client, CumulusCIUsageError): with pytest.raises( OAuth2Error, match=PORT_IN_USE_ERR.format(DEFAULT_CONNECTED_APP_PORT) ): @@ -227,7 +238,7 @@ def test_oauth_flow_error_from_token(self, client): ) # call OAuth object on another thread - this spawns local httpd - with httpd_thread(client): + with httpd_thread(client, OAuth2Error): # simulate callback from browser with pytest.raises(urllib.error.HTTPError): urllib.request.urlopen(client.client_config.redirect_uri + "?code=123") diff --git a/cumulusci/tasks/apex/tests/test_apex_tasks.py b/cumulusci/tasks/apex/tests/test_apex_tasks.py index 7c9079310b..263f5fd265 100644 --- a/cumulusci/tasks/apex/tests/test_apex_tasks.py +++ b/cumulusci/tasks/apex/tests/test_apex_tasks.py @@ -8,6 +8,7 @@ import pytest import responses +from responses.matchers import query_string_matcher from simple_salesforce import SalesforceGeneralError from cumulusci.core import exceptions as exc @@ -73,9 +74,9 @@ def setup_method(self): def _mock_apex_class_query(self, name="TestClass_TEST", namespace=None): namespace_param = "null" if namespace is None else f"%27{namespace}%27" - url = ( - self.base_tooling_url - + "query/?q=SELECT+Id%2C+Name+" + url = self.base_tooling_url + "query/" + query_string = ( + "q=SELECT+Id%2C+Name+" + f"FROM+ApexClass+WHERE+NamespacePrefix+%3D+{namespace_param}" + "+AND+%28Name+LIKE+%27%25_TEST%27%29" ) @@ -85,7 +86,10 @@ def _mock_apex_class_query(self, name="TestClass_TEST", namespace=None): "totalSize": 1, } responses.add( - responses.GET, url, match_querystring=True, json=expected_response + responses.GET, + url, + match=[query_string_matcher(query_string)], + json=expected_response, ) def _get_mock_test_query_results(self, methodnames, outcomes, messages): @@ -163,16 +167,14 @@ def _get_mock_test_query_results(self, methodnames, outcomes, messages): def _get_mock_test_query_url(self, job_id): return ( - self.base_tooling_url - + "query/?q=%0ASELECT+Id%2CApexClassId%2CTestTimestamp%2C%0A+++++++Message%2CMethodName%2COutcome%2C%0A+++++++RunTime%2CStackTrace%2C%0A+++++++%28SELECT%0A++++++++++Id%2CCallouts%2CAsyncCalls%2CDmlRows%2CEmail%2C%0A++++++++++LimitContext%2CLimitExceptions%2CMobilePush%2C%0A++++++++++QueryRows%2CSosl%2CCpu%2CDml%2CSoql%0A++++++++FROM+ApexTestResults%29%0AFROM+ApexTestResult%0AWHERE+AsyncApexJobId%3D%27{}%27%0A".format( - job_id - ) + self.base_tooling_url + "query/", + f"q=%0ASELECT+Id%2CApexClassId%2CTestTimestamp%2C%0A+++++++Message%2CMethodName%2COutcome%2C%0A+++++++RunTime%2CStackTrace%2C%0A+++++++%28SELECT%0A++++++++++Id%2CCallouts%2CAsyncCalls%2CDmlRows%2CEmail%2C%0A++++++++++LimitContext%2CLimitExceptions%2CMobilePush%2C%0A++++++++++QueryRows%2CSosl%2CCpu%2CDml%2CSoql%0A++++++++FROM+ApexTestResults%29%0AFROM+ApexTestResult%0AWHERE+AsyncApexJobId%3D%27{job_id}%27%0A", ) def _get_mock_testqueueitem_status_query_url(self, job_id): return ( - self.base_tooling_url - + f"query/?q=SELECT+Id%2C+Status%2C+ExtendedStatus%2C+ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27{job_id}%27+AND+Status+%3D+%27Failed%27" + (self.base_tooling_url + "query/"), + 
f"q=SELECT+Id%2C+Status%2C+ExtendedStatus%2C+ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27{job_id}%27+AND+Status+%3D+%27Failed%27", ) def _mock_get_test_results( @@ -182,44 +184,50 @@ def _mock_get_test_results( job_id="JOB_ID1234567", methodname=["TestMethod"], ): - url = self._get_mock_test_query_url(job_id) + url, query_string = self._get_mock_test_query_url(job_id) expected_response = self._get_mock_test_query_results( methodname, [outcome], [message] ) responses.add( - responses.GET, url, match_querystring=True, json=expected_response + responses.GET, + url, + match=[query_string_matcher(query_string)], + json=expected_response, ) def _mock_get_test_results_multiple( self, method_names, outcomes, messages, job_id="JOB_ID1234567" ): - url = self._get_mock_test_query_url(job_id) + url, query_string = self._get_mock_test_query_url(job_id) expected_response = self._get_mock_test_query_results( method_names, outcomes, messages ) responses.add( - responses.GET, url, match_querystring=True, json=expected_response + responses.GET, + url, + match=[query_string_matcher(query_string)], + json=expected_response, ) def _mock_get_failed_test_classes(self, job_id="JOB_ID1234567"): - url = self._get_mock_testqueueitem_status_query_url(job_id) + url, query_string = self._get_mock_testqueueitem_status_query_url(job_id) responses.add( responses.GET, url, - match_querystring=True, + match=[query_string_matcher(query_string)], json={"totalSize": 0, "records": [], "done": True}, ) def _mock_get_failed_test_classes_failure(self, job_id="JOB_ID1234567"): - url = self._get_mock_testqueueitem_status_query_url(job_id) + url, query_string = self._get_mock_testqueueitem_status_query_url(job_id) responses.add( responses.GET, url, - match_querystring=True, + match=[query_string_matcher(query_string)], json={ "totalSize": 1, "records": [ @@ -235,14 +243,15 @@ def _mock_get_failed_test_classes_failure(self, job_id="JOB_ID1234567"): ) def _mock_get_symboltable(self): - url = ( - self.base_tooling_url - + "query/?q=SELECT+SymbolTable+FROM+ApexClass+WHERE+Name%3D%27TestClass_TEST%27" + url = self.base_tooling_url + "query/" + query_string = ( + "q=SELECT+SymbolTable+FROM+ApexClass+WHERE+Name%3D%27TestClass_TEST%27" ) responses.add( responses.GET, url, + match=[query_string_matcher(query_string)], json={ "records": [ { @@ -265,9 +274,9 @@ def _mock_get_symboltable_failure(self): responses.add(responses.GET, url, json={"records": []}) def _mock_tests_complete(self, job_id="JOB_ID1234567"): - url = ( - self.base_tooling_url - + "query/?q=SELECT+Id%2C+Status%2C+" + url = self.base_tooling_url + "query/" + query_string = ( + "q=SELECT+Id%2C+Status%2C+" + "ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27" + "{}%27".format(job_id) ) @@ -277,15 +286,18 @@ def _mock_tests_complete(self, job_id="JOB_ID1234567"): "records": [{"Status": "Completed"}], } responses.add( - responses.GET, url, match_querystring=True, json=expected_response + responses.GET, + url, + match=[query_string_matcher(query_string)], + json=expected_response, ) def _mock_tests_processing(self, job_id="JOB_ID1234567"): - url = ( - self.base_tooling_url - + "query/?q=SELECT+Id%2C+Status%2C+" + url = self.base_tooling_url + "query/" + query_string = ( + "q=SELECT+Id%2C+Status%2C+" + "ApexClassId+FROM+ApexTestQueueItem+WHERE+ParentJobId+%3D+%27" - + "{}%27".format(job_id) + + f"{job_id}%27" ) expected_response = { "done": True, @@ -293,7 +305,10 @@ def _mock_tests_processing(self, job_id="JOB_ID1234567"): "records": [{"Status": 
"Processing", "ApexClassId": 1}], } responses.add( - responses.GET, url, match_querystring=True, json=expected_response + responses.GET, + url, + match=[query_string_matcher(query_string)], + json=expected_response, ) def _mock_run_tests(self, success=True, body="JOB_ID1234567"): diff --git a/cumulusci/tasks/bulkdata/tests/test_snowfakery.py b/cumulusci/tasks/bulkdata/tests/test_snowfakery.py index de87c5503f..daa0fa2ef4 100644 --- a/cumulusci/tasks/bulkdata/tests/test_snowfakery.py +++ b/cumulusci/tasks/bulkdata/tests/test_snowfakery.py @@ -783,12 +783,11 @@ def test_explicit_channel_declarations(self, mock_load_data, create_task): "recipe": Path(__file__).parent / "snowfakery/simple_snowfakery.recipe.yml", "run_until_recipe_repeated": 15, - "recipe_options": {"xyzzy": "Nothing happens", "some_number": 42}, "loading_rules": Path(__file__).parent / "snowfakery/simple_snowfakery_channels.load.yml", }, ) - with mock.patch.object( + with pytest.warns(UserWarning), mock.patch.object( task.project_config, "keychain", DummyKeychain() ) as keychain: @@ -833,7 +832,6 @@ def test_serial_mode(self, mock_load_data, create_task): "recipe": Path(__file__).parent / "snowfakery/simple_snowfakery.recipe.yml", "run_until_recipe_repeated": 15, - "recipe_options": {"xyzzy": "Nothing happens", "some_number": 42}, "bulk_mode": "Serial", }, ) diff --git a/cumulusci/tasks/github/tests/test_release.py b/cumulusci/tasks/github/tests/test_release.py index f528e89820..abcb23f645 100644 --- a/cumulusci/tasks/github/tests/test_release.py +++ b/cumulusci/tasks/github/tests/test_release.py @@ -3,6 +3,7 @@ import pytest import responses +from responses.matchers import json_params_matcher from cumulusci.core.config import ServiceConfig, TaskConfig from cumulusci.core.exceptions import GithubException, TaskOptionsError @@ -354,7 +355,7 @@ def test_run_task__with_beta_2gp(self): url=self.repo_api_url + "/releases", json=self._get_expected_release("release"), match=[ - responses.json_params_matcher( + json_params_matcher( { "tag_name": "beta/1.1", "name": "1.1", diff --git a/cumulusci/tasks/release_notes/parser.py b/cumulusci/tasks/release_notes/parser.py index 3e307cfaa7..1fef5d4e16 100644 --- a/cumulusci/tasks/release_notes/parser.py +++ b/cumulusci/tasks/release_notes/parser.py @@ -186,7 +186,7 @@ class GithubIssuesParser(IssuesParser): def __new__(cls, release_notes_generator, title, issue_regex=None): if not release_notes_generator.has_issues: - logging.getLogger(__file__).warn( + logging.getLogger(__file__).warning( "Issues are disabled for this repository. Falling back to change notes parser." 
) return GithubLinesParser(release_notes_generator, title) diff --git a/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py b/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py index b8bb7da5f5..65e5a8be42 100644 --- a/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py +++ b/cumulusci/tasks/salesforce/BaseRetrieveMetadata.py @@ -1,4 +1,5 @@ import functools +from zipfile import ZipFile from cumulusci.tasks.salesforce.BaseSalesforceMetadataApiTask import ( BaseSalesforceMetadataApiTask, @@ -52,6 +53,6 @@ def _process_namespace(self, src_zip): ) return src_zip - def _extract_zip(self, src_zip): + def _extract_zip(self, src_zip: ZipFile): src_zip = self._process_namespace(src_zip) src_zip.extractall(self.options["path"]) diff --git a/cumulusci/tasks/salesforce/RetrievePackaged.py b/cumulusci/tasks/salesforce/RetrievePackaged.py index 25ddddf57e..6a24d1f5e4 100644 --- a/cumulusci/tasks/salesforce/RetrievePackaged.py +++ b/cumulusci/tasks/salesforce/RetrievePackaged.py @@ -1,3 +1,5 @@ +from zipfile import ZipFile + from cumulusci.salesforce_api.metadata import ApiRetrievePackaged from cumulusci.tasks.salesforce import BaseRetrieveMetadata from cumulusci.utils import zip_subfolder @@ -34,6 +36,6 @@ def _get_api(self): self, self.options["package"], self.options.get("api_version") ) - def _extract_zip(self, src_zip): + def _extract_zip(self, src_zip: ZipFile): src_zip = zip_subfolder(src_zip, self.options.get("package")) super(RetrievePackaged, self)._extract_zip(src_zip) diff --git a/cumulusci/tasks/salesforce/tests/test_enable_prediction.py b/cumulusci/tasks/salesforce/tests/test_enable_prediction.py index 194c0e4a19..101860177c 100644 --- a/cumulusci/tasks/salesforce/tests/test_enable_prediction.py +++ b/cumulusci/tasks/salesforce/tests/test_enable_prediction.py @@ -1,5 +1,6 @@ import pytest import responses +from responses.matchers import json_params_matcher from cumulusci.core.config.org_config import OrgConfig from cumulusci.core.exceptions import CumulusCIException @@ -89,12 +90,12 @@ def test_run_task(mock_oauth, task): mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/001", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/002", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) task() @@ -164,12 +165,12 @@ def test_run_task__namespaced_org(mock_oauth, task): mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/001", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/002", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( @@ -222,12 +223,12 @@ def test_run_task__managed_org(mock_oauth, task): mock_oauth.add( method="PATCH", 
url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/001", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) mock_oauth.add( method="PATCH", url=f"https://test-dev-ed.my.salesforce.com/services/data/v{CURRENT_SF_API_VERSION}/tooling/sobjects/MLPredictionDefinition/002", - match=[responses.json_params_matcher({"Metadata": {"status": "Enabled"}})], + match=[json_params_matcher({"Metadata": {"status": "Enabled"}})], ) task() diff --git a/cumulusci/tasks/salesforce/users/tests/test_permsets.py b/cumulusci/tasks/salesforce/users/tests/test_permsets.py index 65b6a97119..96bdf2de70 100644 --- a/cumulusci/tasks/salesforce/users/tests/test_permsets.py +++ b/cumulusci/tasks/salesforce/users/tests/test_permsets.py @@ -3,6 +3,7 @@ import pytest import responses +from responses.matchers import json_params_matcher from cumulusci.core.exceptions import CumulusCIException from cumulusci.tasks.salesforce.tests.util import create_task @@ -68,7 +69,7 @@ def test_create_permset(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -152,7 +153,7 @@ def test_create_permset__alias(self): {"id": "0Pa000000000001", "success": True, "errors": []}, ], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -383,7 +384,7 @@ def test_create_permset_partial_success_raises(self, table): }, ], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -472,7 +473,7 @@ def test_create_permsetlicense(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -547,7 +548,7 @@ def test_create_permsetlicense__no_assignments(self): {"id": "0Pa000000000001", "success": True, "errors": []}, ], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -634,7 +635,7 @@ def test_create_permsetlicense__alias(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -782,7 +783,7 @@ def test_create_permsetgroup(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ @@ -855,7 +856,7 @@ def test_create_permsetgroup__alias(self): status=200, json=[{"id": "0Pa000000000001", "success": True, "errors": []}], match=[ - responses.json_params_matcher( + json_params_matcher( { "allOrNone": False, "records": [ diff --git a/cumulusci/tasks/tests/test_create_package_version.py b/cumulusci/tasks/tests/test_create_package_version.py index 6d096012b5..e972529db8 100644 --- a/cumulusci/tasks/tests/test_create_package_version.py +++ b/cumulusci/tasks/tests/test_create_package_version.py @@ -472,7 +472,7 @@ def test_run_task( return_value=devhub_config, ): task() - + zf.close() assert task.return_values["dependencies"] == [ {"version_id": "04t000000000009AAA"} ] diff --git a/cumulusci/utils/ziputils.py b/cumulusci/utils/ziputils.py index dabb365eb0..f1ae0ead3e 100644 --- a/cumulusci/utils/ziputils.py +++ b/cumulusci/utils/ziputils.py @@ -3,7 +3,7 @@ 
import zipfile -def zip_subfolder(zip_src, path): +def zip_subfolder(zip_src: zipfile.ZipFile, path): if not path.endswith("/"): path = path + "/" From 5ae0e51f65e1ffc8ffd7911f92dca3635a55b409 Mon Sep 17 00:00:00 2001 From: James Estevez Date: Fri, 15 Nov 2024 13:33:27 -0800 Subject: [PATCH 39/65] Fix readthedocs configuration (#3845) - Rename readthedocs.yml to .readthedocs.yml, - update to Ubuntu 22.04 and Python 3.12, - add uv setup and Sphinx build commands. --- readthedocs.yml => .readthedocs.yml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) rename readthedocs.yml => .readthedocs.yml (59%) diff --git a/readthedocs.yml b/.readthedocs.yml similarity index 59% rename from readthedocs.yml rename to .readthedocs.yml index 041aaf738f..4f0f038758 100644 --- a/readthedocs.yml +++ b/.readthedocs.yml @@ -7,9 +7,15 @@ version: 2 # Set the version of Python and other tools you might need build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: - python: "3.9" + python: "3.12" + commands: + - asdf plugin add uv + - asdf install uv latest + - asdf global uv latest + - uv sync --only-group docs --frozen + - uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html # Build documentation in the docs/ directory with Sphinx sphinx: @@ -19,8 +25,3 @@ sphinx: formats: - pdf - epub - -# Optionally declare the Python requirements required to build your docs -python: - install: - - requirements: requirements_dev.txt From 1f322d9950f04fa9971a765b8355b60708363e94 Mon Sep 17 00:00:00 2001 From: James Estevez Date: Fri, 15 Nov 2024 17:43:54 -0800 Subject: [PATCH 40/65] Test SFDX release candidate every week (#3558) This PR extracts the SFDX integration tests into a reusable workflow so that we can test against the SFDX CLI's release candidates. It is intended to provide early warning of breaking changes. 
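One small fix folded into this PR (see the "Edited to add" list below) declares `docutils` as a direct dependency, because `rst2ansi` — used to render reStructuredText to ANSI terminal output — imports docutils internally. A minimal sketch of that usage; the RST snippet is hypothetical:

```python
# Sketch only: the RST content is made up. rst2ansi pulls in docutils,
# which is why docutils must be declared as a direct dependency.
from rst2ansi import rst2ansi

RST = b"""
Custom Deploy
=============

Deploys a specific subset of a project's metadata.
"""

print(rst2ansi(RST))
```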
Edited to add: - integration test fixes for v4 that were missed in #3844 - `docutils`, which is required by `rst2ansi` --- .github/workflows/chores.yml | 22 +++-- .github/workflows/release_test.yml | 86 +++---------------- .github/workflows/release_test_sfdx.yml | 88 ++++++++++++++++++++ .github/workflows/slow_integration_tests.yml | 33 ++++---- Makefile | 8 +- pyproject.toml | 1 + uv.lock | 2 + 7 files changed, 141 insertions(+), 99 deletions(-) create mode 100644 .github/workflows/release_test_sfdx.yml diff --git a/.github/workflows/chores.yml b/.github/workflows/chores.yml index 161f92b800..9fcb788995 100644 --- a/.github/workflows/chores.yml +++ b/.github/workflows/chores.yml @@ -12,23 +12,21 @@ jobs: hub_version: ${{ steps.devhub-api-version.outputs.hub_version }} cci_version: ${{ steps.cci-api-version.outputs.cci_version }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: main - - name: Set up Python - uses: actions/setup-python@v4 - name: Get Dev Hub API Version id: devhub-api-version env: HUB_URL: ${{ format('{0}/services/data', secrets.SFDO_HUB_URL) }} run: | version=$(curl -s $HUB_URL | jq -r '.[-1] | .version') - echo "::set-output name=hub_version::$version" + echo "hub_version=$version" >> $GITHUB_OUTPUT - name: Get CURRENT_SF_API_VERSION id: cci-api-version run: | version=$(yq '.project.package.api_version' cumulusci/cumulusci.yml) - echo "::set-output name=cci_version::$version" + echo "cci_version=$version" >> $GITHUB_OUTPUT update_api_versions: runs-on: SFDO-Tooling-Ubuntu needs: check_api_versions @@ -36,7 +34,7 @@ jobs: env: VERSION: ${{ needs.check_api_versions.outputs.hub_version }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 ref: main @@ -58,3 +56,15 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | gh pr create --fill --label 'auto-pr' + test_sfdx_release_candidate: + uses: ./.github/workflows/release_test_sfdx.yml + with: + sfdx-release-channel: stable-rc + secrets: + CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }} + CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }} + CCITEST_APP_KEY: ${{ secrets.CCITEST_APP_KEY }} + SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }} + SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }} + SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }} + SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }} diff --git a/.github/workflows/release_test.yml b/.github/workflows/release_test.yml index 2b988f4578..b3d7c2a917 100644 --- a/.github/workflows/release_test.yml +++ b/.github/workflows/release_test.yml @@ -1,35 +1,9 @@ name: Release Test on: + workflow_dispatch: pull_request: types: [opened, synchronize, reopened] # Default - workflow_call: - secrets: - CUMULUSCI_ORG_packaging: - required: true - CUMULUSCI_SERVICE_github: - required: true - CCITEST_APP_KEY: - required: true - SFDX_CLIENT_ID: - required: true - SFDX_HUB_KEY: - required: true - SFDX_HUB_KEY_BASE64: - required: true - SFDX_HUB_USERNAME: - required: true - -env: - CUMULUSCI_ORG_scratch: '{"config_file": "orgs/dev.json", "scratch": true}' - CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }} - CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }} - GITHUB_APP_ID: 129383 - GITHUB_APP_KEY: ${{ secrets.CCITEST_APP_KEY }} - SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }} - SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }} - SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }} - SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }} jobs: test_artifacts: @@ -67,50 +41,14 
@@ jobs: test_release: name: "Test Release Flows" - runs-on: SFDO-Tooling-Ubuntu - concurrency: release - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.11 - uses: actions/setup-python@v4 - with: - python-version: 3.11 - cache: pip - cache-dependency-path: "pyproject.toml" - - name: Set up uv - uses: SFDO-Tooling/setup-uv@main - with: - version: "0.5.0" - enable-cache: true - - name: Install Python dependencies - run: uv sync - - name: Install sfdx - run: | - mkdir sfdx - wget -qO- https://developer.salesforce.com/media/salesforce-cli/sfdx/channels/stable/sfdx-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1 - echo $(realpath sfdx/bin) >> $GITHUB_PATH - - name: Authenticate Dev Hub - run: | - sfdx plugins --core - echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key - sfdx auth:jwt:grant --clientid $SFDX_CLIENT_ID --jwtkeyfile sfdx.key --username $SFDX_HUB_USERNAME --setdefaultdevhubusername -a hub - - name: Check out CumulusCI-Test - run: | - git clone https://github.com/SFDO-Tooling/CumulusCI-Test - - name: Run ci_feature flow - run: | - cd CumulusCI-Test - uv run cci flow run ci_feature --org scratch --delete-org - - name: Run ci_beta flow - run: | - cd CumulusCI-Test - uv run cci flow run ci_beta --org scratch --delete-org - - name: Run ci_master flow - run: | - cd CumulusCI-Test - uv run cci flow run ci_master --org scratch --delete-org - - name: Run release_beta flow - run: | - export SFDX_HUB_KEY="$(echo $SFDX_HUB_KEY_BASE64 | base64 --decode)" - cd CumulusCI-Test - uv run cci flow run release_beta --org packaging + uses: ./.github/workflows/release_test_sfdx.yml + with: + sfdx-release-channel: stable + secrets: + CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }} + CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }} + CCITEST_APP_KEY: ${{ secrets.CCITEST_APP_KEY }} + SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }} + SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }} + SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }} + SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }} diff --git a/.github/workflows/release_test_sfdx.yml b/.github/workflows/release_test_sfdx.yml new file mode 100644 index 0000000000..00eb55719e --- /dev/null +++ b/.github/workflows/release_test_sfdx.yml @@ -0,0 +1,88 @@ +name: SFDX Integration Test + +on: + workflow_call: + inputs: + sfdx-release-channel: + required: false + type: string + default: stable + secrets: + CUMULUSCI_ORG_packaging: + required: true + CUMULUSCI_SERVICE_github: + required: true + CCITEST_APP_KEY: + required: true + SFDX_CLIENT_ID: + required: true + SFDX_HUB_KEY: + required: true + SFDX_HUB_KEY_BASE64: + required: true + SFDX_HUB_USERNAME: + required: true + +env: + CUMULUSCI_ORG_scratch: '{"config_file": "orgs/dev.json", "scratch": true}' + CUMULUSCI_ORG_packaging: ${{ secrets.CUMULUSCI_ORG_packaging }} + CUMULUSCI_SERVICE_github: ${{ secrets.CUMULUSCI_SERVICE_github }} + GITHUB_APP_ID: 129383 + GITHUB_APP_KEY: ${{ secrets.CCITEST_APP_KEY }} + SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }} + SFDX_HUB_KEY: ${{ secrets.SFDX_HUB_KEY }} + SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }} + SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }} + +jobs: + test_release: + name: "Test SFDX CLI" + runs-on: SFDO-Tooling-Ubuntu + concurrency: release + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + cache: pip + cache-dependency-path: "pyproject.toml" + - name: Set up uv + uses: SFDO-Tooling/setup-uv@main + with: + 
version: "0.5.0" + enable-cache: true + - name: Install Python dependencies + run: uv sync + - name: Install Salesforce CLI + env: + CHANNEL: ${{ inputs.sfdx-release-channel }} + run: | + mkdir sfdx + wget -qO- https://developer.salesforce.com/media/salesforce-cli/sf/channels/$CHANNEL/sf-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1 + echo $(realpath sfdx/bin) >> $GITHUB_PATH + - name: Authenticate Dev Hub + run: | + sf plugins --core + echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key + sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --set-default-dev-hub --alias hub + - name: Check out CumulusCI-Test + run: | + git clone https://github.com/SFDO-Tooling/CumulusCI-Test + - name: Run ci_feature flow + run: | + cd CumulusCI-Test + uv run cci flow run ci_feature --org scratch --delete-org + - name: Run ci_beta flow + run: | + cd CumulusCI-Test + uv run cci flow run ci_beta --org scratch --delete-org + - name: Run ci_master flow + run: | + cd CumulusCI-Test + uv run cci flow run ci_master --org scratch --delete-org + - name: Run release_beta flow + run: | + export SFDX_HUB_KEY="$(echo $SFDX_HUB_KEY_BASE64 | base64 --decode)" + cd CumulusCI-Test + uv run cci flow run release_beta --org packaging diff --git a/.github/workflows/slow_integration_tests.yml b/.github/workflows/slow_integration_tests.yml index 0f75321ed6..73c27c5767 100644 --- a/.github/workflows/slow_integration_tests.yml +++ b/.github/workflows/slow_integration_tests.yml @@ -31,20 +31,23 @@ jobs: python-version: 3.11 cache: pip cache-dependency-path: "pyproject.toml" - - name: Install Python dependencies - run: | - python -m pip install -U pip - pip install .[test] - - name: Install sfdx + - name: Set up uv + uses: SFDO-Tooling/setup-uv@main + with: + version: "0.5.0" + enable-cache: true + - name: Install dependencies + run: uv sync -p 3.11 + - name: Install Salesforce CLI run: | mkdir sfdx - wget -qO- https://developer.salesforce.com/media/salesforce-cli/sfdx/channels/stable/sfdx-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1 + wget -qO- https://developer.salesforce.com/media/salesforce-cli/sf/channels/stable/sf-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1 echo $(realpath sfdx/bin) >> $GITHUB_PATH - name: Authenticate Dev Hub run: | - sfdx plugins --core + sf plugins --core echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key - sfdx auth:jwt:grant --clientid $SFDX_CLIENT_ID --jwtkeyfile sfdx.key --username $SFDX_HUB_USERNAME --setdefaultdevhubusername -a hub + sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --set-default-dev-hub --alias hub env: SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }} SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }} @@ -54,7 +57,7 @@ jobs: - name: Delete scratch org if: always() run: | - cci org scratch_delete pytest + uv run cci org scratch_delete pytest robot_ui: name: "Robot: ${{ matrix.job-name }}" runs-on: SFDO-Tooling-Ubuntu @@ -80,26 +83,26 @@ jobs: cache: pip cache-dependency-path: "pyproject.toml" - name: Install Python dependencies - run: pip install .[test] - - name: Install sfdx + run: pip install . 
+ - name: Install Salesforce CLI run: | mkdir sfdx - wget -qO- https://developer.salesforce.com/media/salesforce-cli/sfdx/channels/stable/sf-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1 + wget -qO- https://developer.salesforce.com/media/salesforce-cli/sf/channels/stable/sf-linux-x64.tar.xz | tar xJ -C sfdx --strip-components 1 echo $(realpath sfdx/bin) >> $GITHUB_PATH - name: Initialize Browser/Playwright run: cci robot install_playwright - name: Authenticate Dev Hub run: | - sfdx plugins --core + sf plugins --core echo $SFDX_HUB_KEY_BASE64 | base64 --decode > sfdx.key - sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --setdefaultdevhubusername -a hub + sf org login jwt --client-id $SFDX_CLIENT_ID --jwt-key-file sfdx.key --username $SFDX_HUB_USERNAME --set-default-dev-hub --alias hub env: SFDX_HUB_KEY_BASE64: ${{ secrets.SFDX_HUB_KEY_BASE64 }} SFDX_CLIENT_ID: ${{ secrets.SFDX_CLIENT_ID }} SFDX_HUB_USERNAME: ${{ secrets.SFDX_HUB_USERNAME }} - name: Run robot tests run: | - coverage run --append $(which cci) task run robot \ + cci task run robot \ --org ${{ matrix.org-shape }} \ -o suites cumulusci/robotframework/tests/salesforce \ -o exclude no-browser \ diff --git a/Makefile b/Makefile index cce895e0c8..19b7ada463 100644 --- a/Makefile +++ b/Makefile @@ -64,13 +64,13 @@ coverage: ## check code coverage quickly with the default Python $(BROWSER) htmlcov/index.html vcr: # remake VCR cassettes and run other integration tests - cci org scratch qa pytest - cci org scratch_delete pytest + uv run cci org scratch qa pytest + uv run cci org scratch_delete pytest find . -name \Test*.yaml | xargs rm - pytest --org qa --run-slow-tests -rs --replace-vcrs + uv run pytest --org qa --run-slow-tests -rs --replace-vcrs slow_tests: vcr # remake VCR cassettes and run other integration tests - cci org scratch_delete pytest + uv run cci org scratch_delete pytest pytest integration_tests/ --org pytest -rs docs: ## generate Sphinx HTML documentation diff --git a/pyproject.toml b/pyproject.toml index 27d3ba95cc..585e7f4654 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,6 +54,7 @@ dependencies = [ "simple-salesforce==1.11.4", "snowfakery>=4.0.0", "xmltodict", + "docutils>=0.21.2", ] [dependency-groups] diff --git a/uv.lock b/uv.lock index ddb238bec1..0d39fb87b6 100644 --- a/uv.lock +++ b/uv.lock @@ -339,6 +339,7 @@ dependencies = [ { name = "click" }, { name = "cryptography" }, { name = "defusedxml" }, + { name = "docutils" }, { name = "faker" }, { name = "fs" }, { name = "github3-py" }, @@ -401,6 +402,7 @@ requires-dist = [ { name = "click", specifier = ">=8.1" }, { name = "cryptography" }, { name = "defusedxml" }, + { name = "docutils", specifier = ">=0.21.2" }, { name = "faker" }, { name = "fs" }, { name = "github3-py" }, From abe92e7ef01a7348e30bdaf53048d13cc8a939f8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 21:03:30 +0000 Subject: [PATCH 41/65] Release v4.0.1 (#3846) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: James Estevez --- cumulusci/__about__.py | 2 +- docs/history.md | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/cumulusci/__about__.py b/cumulusci/__about__.py index ce1305bf4e..76ad18b89a 100644 --- a/cumulusci/__about__.py +++ b/cumulusci/__about__.py @@ -1 +1 @@ -__version__ = "4.0.0" +__version__ = "4.0.1" diff --git a/docs/history.md 
b/docs/history.md index 536b806184..5ff75547d1 100644 --- a/docs/history.md +++ b/docs/history.md @@ -2,6 +2,16 @@ +## v4.0.1 (2024-11-18) + +### Issues Fixed 🩴 + +- Fixed a ModuleNotFoundError for docutils by adding the dependency `docutils` by [@jstvz](https://github.com/jstvz) in [#3558](https://github.com/SFDO-Tooling/CumulusCI/pull/3558). + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.0...v4.0.1 + + + ## v4.0.0 (2024-11-12) ## What's Changed @@ -14,8 +24,6 @@ **Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v3.92.0...v4.0.0 - - ## v3.93.0 (2024-10-17) From 138241e38037720eee2c954bf3f3f4d7187f8ff0 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 19 Nov 2024 02:47:04 +0530 Subject: [PATCH 42/65] Functionality for Select+Insert --- cumulusci/tasks/bulkdata/load.py | 24 +- cumulusci/tasks/bulkdata/mapping_parser.py | 1 + .../tasks/bulkdata/query_transformers.py | 20 +- cumulusci/tasks/bulkdata/select_utils.py | 251 +++++++++-- cumulusci/tasks/bulkdata/step.py | 234 +++++++--- ...lect_invalid_threshold__invalid_number.yml | 21 + ...ct_invalid_threshold__invalid_strategy.yml | 21 + ...ng_select_invalid_threshold__non_float.yml | 21 + cumulusci/tasks/bulkdata/tests/test_load.py | 6 +- .../bulkdata/tests/test_mapping_parser.py | 29 ++ .../tasks/bulkdata/tests/test_select_utils.py | 411 ++++++++++++++++-- cumulusci/tasks/bulkdata/tests/test_step.py | 87 +++- 12 files changed, 960 insertions(+), 166 deletions(-) create mode 100644 cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml create mode 100644 cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml create mode 100644 cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index 9a2f08ee90..0732d57777 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -312,10 +312,14 @@ def _execute_step( def process_lookup_fields(self, mapping, fields, polymorphic_fields): """Modify fields and priority fields based on lookup and polymorphic checks.""" + # Store the lookups and their original order for re-insertion at the end + original_lookups = [name for name in fields if name in mapping.lookups] + max_insert_index = -1 for name, lookup in mapping.lookups.items(): if name in fields: # Get the index of the lookup field before removing it insert_index = fields.index(name) + max_insert_index = max(max_insert_index, insert_index) # Remove the lookup field from fields fields.remove(name) @@ -351,7 +355,7 @@ def process_lookup_fields(self, mapping, fields, polymorphic_fields): None, ) if lookup_mapping_step: - lookup_fields = lookup_mapping_step.get_load_field_list() + lookup_fields = lookup_mapping_step.fields.keys() # Insert fields in the format {relationship_name}.{ref_type}.{lookup_field} for field in lookup_fields: fields.insert( @@ -359,6 +363,7 @@ def process_lookup_fields(self, mapping, fields, polymorphic_fields): f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}", ) insert_index += 1 + max_insert_index = max(max_insert_index, insert_index) if lookup_in_priority_fields: mapping.select_options.priority_fields[ f"{relationship_name}.{lookup_mapping_step.sf_object}.{field}" @@ -383,17 +388,24 @@ def process_lookup_fields(self, mapping, fields, polymorphic_fields): if lookup_mapping_step: relationship_name = polymorphic_fields[name]["relationshipName"] - lookup_fields = 
lookup_mapping_step.get_load_field_list() + lookup_fields = lookup_mapping_step.fields.keys() # Insert the new fields at the same position as the removed lookup field for field in lookup_fields: fields.insert(insert_index, f"{relationship_name}.{field}") insert_index += 1 + max_insert_index = max(max_insert_index, insert_index) if lookup_in_priority_fields: mapping.select_options.priority_fields[ f"{relationship_name}.{field}" ] = f"{relationship_name}.{field}" + # Append the original lookups at the end in the same order + for name in original_lookups: + if name not in fields: + fields.insert(max_insert_index, name) + max_insert_index += 1 + def configure_step(self, mapping): """Create a step appropriate to the action""" bulk_mode = mapping.bulk_mode or self.bulk_mode or "Parallel" @@ -479,6 +491,7 @@ def configure_step(self, mapping): selection_filter=mapping.select_options.filter, selection_priority_fields=mapping.select_options.priority_fields, content_type=content_type, + threshold=mapping.select_options.threshold, ) return step, query @@ -588,10 +601,9 @@ def _query_db(self, mapping): mapping, self.mapping, self.metadata, model, self._old_format ) ) - else: - transformers.append( - AddLookupsToQuery(mapping, self.metadata, model, self._old_format) - ) + transformers.append( + AddLookupsToQuery(mapping, self.metadata, model, self._old_format) + ) transformers.extend([cls(mapping, self.metadata, model) for cls in classes]) diff --git a/cumulusci/tasks/bulkdata/mapping_parser.py b/cumulusci/tasks/bulkdata/mapping_parser.py index 1593dc97a1..e630d564c6 100644 --- a/cumulusci/tasks/bulkdata/mapping_parser.py +++ b/cumulusci/tasks/bulkdata/mapping_parser.py @@ -31,6 +31,7 @@ class MappingLookup(CCIDictModel): join_field: Optional[str] = None after: Optional[str] = None aliased_table: Optional[Any] = None + parent_tables: Optional[Any] = None name: Optional[str] = None # populated by parent def get_lookup_key_field(self, model=None): diff --git a/cumulusci/tasks/bulkdata/query_transformers.py b/cumulusci/tasks/bulkdata/query_transformers.py index f99689618e..181736a4bc 100644 --- a/cumulusci/tasks/bulkdata/query_transformers.py +++ b/cumulusci/tasks/bulkdata/query_transformers.py @@ -106,14 +106,14 @@ def columns_to_add(self): columns = [] for lookup in self.lookups: tables = lookup.table if isinstance(lookup.table, list) else [lookup.table] - lookup.aliased_table = [ + lookup.parent_tables = [ aliased( self.metadata.tables[table], name=f"{lookup.name}_{table}_alias" ) for table in tables ] - for aliased_table, table_name in zip(lookup.aliased_table, tables): + for parent_table, table_name in zip(lookup.parent_tables, tables): # Find the mapping step for this polymorphic type lookup_mapping_step = next( ( @@ -124,24 +124,24 @@ def columns_to_add(self): None, ) if lookup_mapping_step: - load_fields = lookup_mapping_step.get_load_field_list() + load_fields = lookup_mapping_step.fields.keys() for field in load_fields: if field in lookup_mapping_step.fields: matching_column = next( ( col - for col in aliased_table.columns + for col in parent_table.columns if col.name == lookup_mapping_step.fields[field] ) ) columns.append( - matching_column.label(f"{aliased_table.name}_{field}") + matching_column.label(f"{parent_table.name}_{field}") ) else: # Append an empty string if the field is not present columns.append( literal_column("''").label( - f"{aliased_table.name}_{field}" + f"{parent_table.name}_{field}" ) ) return columns @@ -150,15 +150,15 @@ def columns_to_add(self): def 
outerjoins_to_add(self): """Add outer joins for each lookup table directly, including handling for polymorphic lookups.""" - def join_for_lookup(lookup, aliased_table): + def join_for_lookup(lookup, parent_table): key_field = lookup.get_lookup_key_field(self.model) value_column = getattr(self.model, key_field) - return (aliased_table, aliased_table.columns.id == value_column) + return (parent_table, parent_table.columns.id == value_column) joins = [] for lookup in self.lookups: - for aliased_table in lookup.aliased_table: - joins.append(join_for_lookup(lookup, aliased_table)) + for parent_table in lookup.parent_tables: + joins.append(join_for_lookup(lookup, parent_table)) return joins diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index f5800f9b38..7412a38ae4 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -6,7 +6,7 @@ import numpy as np import pandas as pd from annoy import AnnoyIndex -from pydantic import Field, validator +from pydantic import Field, root_validator, validator from sklearn.feature_extraction.text import HashingVectorizer from sklearn.preprocessing import StandardScaler @@ -45,6 +45,7 @@ class SelectOptions(CCIDictModel): filter: T.Optional[str] = None # Optional filter for selection strategy: SelectStrategy = SelectStrategy.STANDARD # Strategy for selection priority_fields: T.Dict[str, str] = Field({}) + threshold: T.Optional[float] = None @validator("strategy", pre=True) def validate_strategy(cls, value): @@ -66,6 +67,26 @@ def standardize_fields_to_dict(cls, values): values = {elem: elem for elem in values} return CaseInsensitiveDict(values) + @root_validator + def validate_threshold_and_strategy(cls, values): + threshold = values.get("threshold") + strategy = values.get("strategy") + + if threshold is not None: + values["threshold"] = float(threshold) # Convert to float + + if not (0 <= values["threshold"] <= 1): + raise ValueError( + f"Threshold must be between 0 and 1, got {values['threshold']}." + ) + + if strategy != SelectStrategy.SIMILARITY: + raise ValueError( + "If a threshold is specified, the strategy must be set to 'similarity'." 
+ ) + + return values + class SelectOperationExecutor: def __init__(self, strategy: SelectStrategy): @@ -84,6 +105,7 @@ def select_generate_query( limit: T.Union[int, None], offset: T.Union[int, None], ): + _, select_fields = split_and_filter_fields(fields=fields) # For STANDARD strategy if self.strategy == SelectStrategy.STANDARD: return standard_generate_query( @@ -93,7 +115,7 @@ def select_generate_query( elif self.strategy == SelectStrategy.SIMILARITY: return similarity_generate_query( sobject=sobject, - fields=fields, + fields=select_fields, user_filter=user_filter, limit=limit, offset=offset, @@ -108,9 +130,11 @@ def select_post_process( self, load_records, query_records: list, + fields: list, num_records: int, sobject: str, weights: list, + threshold: T.Union[float, None], ): # For STANDARD strategy if self.strategy == SelectStrategy.STANDARD: @@ -122,8 +146,10 @@ def select_post_process( return similarity_post_process( load_records=load_records, query_records=query_records, + fields=fields, sobject=sobject, weights=weights, + threshold=threshold, ) # For RANDOM strategy elif self.strategy == SelectStrategy.RANDOM: @@ -158,12 +184,12 @@ def standard_generate_query( def standard_post_process( query_records: list, num_records: int, sobject: str -) -> T.Tuple[T.List[dict], T.Union[str, None]]: +) -> T.Tuple[T.List[dict], None, T.Union[str, None]]: """Processes the query results for the standard selection strategy""" # Handle case where query returns 0 records if not query_records: error_message = f"No records found for {sobject} in the target org." - return [], error_message + return [], None, error_message # Add 'success: True' to each record to emulate records have been inserted selected_records = [ @@ -177,7 +203,7 @@ def standard_post_process( selected_records.extend(original_records) selected_records = selected_records[:num_records] - return selected_records, None # Return selected records and None for error + return selected_records, None, None # Return selected records and None for error def similarity_generate_query( @@ -255,13 +281,20 @@ def similarity_generate_query( def similarity_post_process( - load_records, query_records: list, sobject: str, weights: list -) -> T.Tuple[T.List[dict], T.Union[str, None]]: + load_records, + query_records: list, + fields: list, + sobject: str, + weights: list, + threshold: T.Union[float, None], +) -> T.Tuple[ + T.List[T.Union[dict, None]], T.List[T.Union[list, None]], T.Union[str, None] +]: """Processes the query results for the similarity selection strategy""" # Handle case where query returns 0 records - if not query_records: + if not query_records and not threshold: error_message = f"No records found for {sobject} in the target org." 
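All post-process variants now return a `(selected_records, insert_records, error_message)` triple. For the similarity strategy below, records whose best match is too distant are routed to insertion rather than selection; a minimal sketch of that partition, assuming precomputed `(org_id, distance)` pairs with distances normalized to [0, 1] (the helper name here is illustrative, not the module's API):

    from typing import Optional

    def partition_by_threshold(load_records, matches, threshold: Optional[float]):
        """Keep close matches as selected records; queue the rest for insert."""
        selected, to_insert = [], []
        for record, (org_id, distance) in zip(load_records, matches):
            if threshold is not None and distance >= threshold:
                selected.append(None)  # placeholder, filled in after the insert job
                to_insert.append(record)
            else:
                selected.append({"id": org_id, "success": True, "created": False})
        return selected, to_insert

The `None` placeholders matter: `select_records` later overwrites them with the results of the follow-up insert job, preserving row order.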
- return [], error_message + return [], [], error_message load_records = list(load_records) # Replace None values in each row with empty strings @@ -272,23 +305,55 @@ def similarity_post_process( complexity_constant = load_record_count * query_record_count - closest_records = [] + select_records = [] + insert_records = [] if complexity_constant < 1000: - closest_records = levenshtein_post_process(load_records, query_records, weights) + select_records, insert_records = levenshtein_post_process( + load_records, query_records, fields, weights, threshold + ) else: - closest_records = annoy_post_process(load_records, query_records, weights) + select_records, insert_records = annoy_post_process( + load_records, query_records, fields, weights, threshold + ) - return closest_records + return select_records, insert_records, None def annoy_post_process( - load_records: list, query_records: list, weights: list -) -> T.Tuple[T.List[dict], T.Union[str, None]]: + load_records: list, + query_records: list, + all_fields: list, + similarity_weights: list, + threshold: T.Union[float, None], +) -> T.Tuple[T.List[dict], list]: """Processes the query results for the similarity selection strategy using Annoy algorithm for large number of records""" + selected_records = [] + insertion_candidates = [] + + # Split fields into load and select categories + load_field_list, select_field_list = split_and_filter_fields(fields=all_fields) + # Only select those weights for select field list + similarity_weights = [ + similarity_weights[idx] + for idx, field in enumerate(all_fields) + if field in select_field_list + ] + load_shaped_records = reorder_records( + records=load_records, original_fields=all_fields, new_fields=load_field_list + ) + select_shaped_records = reorder_records( + records=load_records, original_fields=all_fields, new_fields=select_field_list + ) + + if not query_records: + # Directly append to load record for insertion if target_records is empty + selected_records = [None for _ in load_records] + insertion_candidates = load_shaped_records + return selected_records, insertion_candidates query_records = replace_empty_strings_with_missing(query_records) - load_records = replace_empty_strings_with_missing(load_records) + select_shaped_records = replace_empty_strings_with_missing(select_shaped_records) hash_features = 100 num_trees = 10 @@ -302,7 +367,10 @@ def annoy_post_process( } final_load_vectors, final_query_vectors = vectorize_records( - load_records, query_record_data, hash_features=hash_features, weights=weights + select_shaped_records, + query_record_data, + hash_features=hash_features, + weights=similarity_weights, ) # Create Annoy index for nearest neighbor search @@ -318,49 +386,89 @@ def annoy_post_process( # Find nearest neighbors for each query vector n_neighbors = 1 - closest_records = [] - for i, load_vector in enumerate(final_load_vectors): # Get nearest neighbors' indices and distances nearest_neighbors = annoy_index.get_nns_by_vector( load_vector, n_neighbors, include_distances=True ) neighbor_indices = nearest_neighbors[0] # Indices of nearest neighbors + neighbor_distances = [ + distance / 2 for distance in nearest_neighbors[1] + ] # Distances sqrt(2(1-cos(u,v)))/2 lies between [0,1] - for neighbor_index in neighbor_indices: + for idx, neighbor_index in enumerate(neighbor_indices): # Retrieve the corresponding record from the database record = query_record_data[neighbor_index] closest_record_id = record_to_id_map[tuple(record)] - closest_records.append( - {"id": closest_record_id, 
"success": True, "created": False} - ) + if threshold and (neighbor_distances[idx] >= threshold): + selected_records.append(None) + insertion_candidates.append(load_shaped_records[i]) + else: + selected_records.append( + {"id": closest_record_id, "success": True, "created": False} + ) - return closest_records, None + return selected_records, insertion_candidates def levenshtein_post_process( - load_records: list, query_records: list, weights: list -) -> T.Tuple[T.List[dict], T.Union[str, None]]: - """Processes the query results for the similarity selection strategy using Levenshtein algorithm for small number of records""" - closest_records = [] - - for record in load_records: - closest_record = find_closest_record(record, query_records, weights) - closest_records.append( - {"id": closest_record[0], "success": True, "created": False} + source_records: list, + target_records: list, + all_fields: list, + similarity_weights: list, + distance_threshold: T.Union[float, None], +) -> T.Tuple[T.List[T.Optional[dict]], T.List[T.Optional[list]]]: + """Processes query results using Levenshtein algorithm for similarity selection with a small number of records.""" + selected_records = [] + insertion_candidates = [] + + # Split fields into load and select categories + load_field_list, select_field_list = split_and_filter_fields(fields=all_fields) + # Only select those weights for select field list + similarity_weights = [ + similarity_weights[idx] + for idx, field in enumerate(all_fields) + if field in select_field_list + ] + load_shaped_records = reorder_records( + records=source_records, original_fields=all_fields, new_fields=load_field_list + ) + select_shaped_records = reorder_records( + records=source_records, original_fields=all_fields, new_fields=select_field_list + ) + + if not target_records: + # Directly append to load record for insertion if target_records is empty + selected_records = [None for _ in source_records] + insertion_candidates = load_shaped_records + return selected_records, insertion_candidates + + for select_record, load_record in zip(select_shaped_records, load_shaped_records): + closest_match, match_distance = find_closest_record( + select_record, target_records, similarity_weights ) - return closest_records, None + if distance_threshold and match_distance > distance_threshold: + # Append load record for insertion if distance exceeds threshold + insertion_candidates.append(load_record) + selected_records.append(None) + elif closest_match: + # Append match details if distance is within threshold + selected_records.append( + {"id": closest_match[0], "success": True, "created": False} + ) + + return selected_records, insertion_candidates def random_post_process( query_records: list, num_records: int, sobject: str -) -> T.Tuple[T.List[dict], T.Union[str, None]]: +) -> T.Tuple[T.List[dict], None, T.Union[str, None]]: """Processes the query results for the random selection strategy""" if not query_records: error_message = f"No records found for {sobject} in the target org." 
- return [], error_message + return [], None, error_message selected_records = [] for _ in range(num_records): # Loop 'num_records' times @@ -370,7 +478,7 @@ def random_post_process( {"id": random_record[0], "success": True, "created": False} ) - return selected_records, None + return selected_records, None, None def find_closest_record(load_record: list, query_records: list, weights: list): @@ -383,7 +491,7 @@ def find_closest_record(load_record: list, query_records: list, weights: list): closest_distance = distance closest_record = record - return closest_record + return closest_record, closest_distance def levenshtein_distance(str1: str, str2: str): @@ -417,7 +525,6 @@ def calculate_levenshtein_distance(record1: list, record2: list, weights: list): raise ValueError("Records must be same size as fields (weights).") total_distance = 0 - total_fields = 0 for field1, field2, weight in zip(record1, record2, weights): field1 = field1.lower() @@ -427,16 +534,19 @@ def calculate_levenshtein_distance(record1: list, record2: list, weights: list): # If both fields are blank, distance is 0 distance = 0 else: - distance = levenshtein_distance(field1, field2) + # Average distance per character + distance = levenshtein_distance(field1, field2) / max( + len(field1), len(field2) + ) if len(field1) == 0 or len(field2) == 0: # If one field is blank, reduce the impact of the distance distance = distance * 0.05 # Fixed value for blank vs non-blank # Multiply the distance by the corresponding weight total_distance += distance * weight - total_fields += 1 - return total_distance / total_fields if total_fields > 0 else 0 + # Average distance per character with weights + return total_distance / sum(weights) if len(weights) else 0 def add_limit_offset_to_user_filter( @@ -600,3 +710,60 @@ def replace_empty_strings_with_missing(records): [(field if field != "" else "missing") for field in record] for record in records ] + + +def split_and_filter_fields(fields: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]: + # List to store non-lookup fields (load fields) + load_fields = [] + + # Set to store unique first components of select fields + unique_components = set() + # Keep track of last flattened lookup index + last_flat_lookup_index = -1 + + # Iterate through the fields + for idx, field in enumerate(fields): + if "." in field: + # Split the field by '.' 
and add the first component to the set + first_component = field.split(".")[0] + unique_components.add(first_component) + last_flat_lookup_index = max(last_flat_lookup_index, idx) + else: + # Add the field to the load_fields list + load_fields.append(field) + + # Number of unique components + num_unique_components = len(unique_components) + + # Adjust select_fields by removing only the field at last_flat_lookup_index + 1 + if last_flat_lookup_index + 1 < len( + fields + ) and last_flat_lookup_index + num_unique_components < len(fields): + select_fields = ( + fields[: last_flat_lookup_index + 1] + + fields[last_flat_lookup_index + num_unique_components + 1 :] + ) + else: + select_fields = fields + + return load_fields, select_fields + + +# Function to reorder records based on the new field list +def reorder_records(records, original_fields, new_fields): + if not original_fields: + raise KeyError("original_fields should not be empty") + # Map the original field indices + field_index_map = {field: i for i, field in enumerate(original_fields)} + reordered_records = [] + + for record in records: + reordered_records.append( + [ + record[field_index_map[field]] + for field in new_fields + if field in field_index_map + ] + ) + + return reordered_records diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index 3e60ef91c0..b88fa8b100 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -20,6 +20,7 @@ SelectOperationExecutor, SelectRecordRetrievalMode, SelectStrategy, + split_and_filter_fields, ) from cumulusci.tasks.bulkdata.utils import DataApi, iterate_in_chunks from cumulusci.utils.classutils import namedtuple_as_simple_dict @@ -356,6 +357,7 @@ def __init__( selection_filter=None, selection_priority_fields=None, content_type=None, + threshold=None, ): super().__init__( sobject=sobject, @@ -377,6 +379,7 @@ def __init__( priority_fields=selection_priority_fields, fields=fields ) self.content_type = content_type if content_type else "CSV" + self.threshold = threshold def start(self): self.job_id = self.bulk.create_job( @@ -459,18 +462,7 @@ def select_records(self, records): records, records_copy = tee(records) # Count total number of records to fetch using the copy total_num_records = sum(1 for _ in records_copy) - - # Set LIMIT condition - if ( - self.select_operation_executor.retrieval_mode - == SelectRecordRetrievalMode.ALL - ): - limit_clause = None - elif ( - self.select_operation_executor.retrieval_mode - == SelectRecordRetrievalMode.MATCH - ): - limit_clause = total_num_records + limit_clause = self._determine_limit_clause(total_num_records=total_num_records) # Generate and execute SOQL query # (not passing offset as it is not supported in Bulk) @@ -494,14 +486,34 @@ def select_records(self, records): # Post-process the query results ( selected_records, + insert_records, error_message, ) = self.select_operation_executor.select_post_process( load_records=records, query_records=query_records, + fields=self.fields, num_records=total_num_records, sobject=self.sobject, weights=self.weights, + threshold=self.threshold, ) + + # Log the number of selected and prepared for insertion records + num_selected = sum(1 for record in selected_records if record) + num_prepared = len(insert_records) if insert_records else 0 + + self.logger.info( + f"{num_selected} records selected." + + ( + f" {num_prepared} records prepared for insertion." 
+ if num_prepared > 0 + else "" + ) + ) + + if insert_records: + self._process_insert_records(insert_records, selected_records) + if not error_message: self.select_results.extend(selected_records) @@ -517,6 +529,60 @@ def select_records(self, records): total_row_errors=0, ) + def _process_insert_records(self, insert_records, selected_records): + """Processes and inserts records if necessary.""" + insert_fields, _ = split_and_filter_fields(fields=self.fields) + insert_step = BulkApiDmlOperation( + sobject=self.sobject, + operation=DataOperationType.INSERT, + api_options=self.api_options, + context=self.context, + fields=insert_fields, + ) + insert_step.start() + insert_step.load_records(insert_records) + insert_step.end() + # Retrieve insert results + insert_results = [] + for batch_id in insert_step.batch_ids: + try: + results_url = f"{insert_step.bulk.endpoint}/job/{insert_step.job_id}/batch/{batch_id}/result" + # Download entire result file to a temporary file first + # to avoid the server dropping connections + with download_file(results_url, insert_step.bulk) as f: + self.logger.info(f"Downloaded results for batch {batch_id}") + reader = csv.reader(f) + next(reader) # Skip header row + for row in reader: + success = process_bool_arg(row[1]) + created = process_bool_arg(row[2]) + insert_results.append( + {"id": row[0], "success": success, "created": created} + ) + except Exception as e: + raise BulkDataException( + f"Failed to download results for batch {batch_id} ({str(e)})" + ) + + insert_index = 0 + for idx, record in enumerate(selected_records): + if record is None: + selected_records[idx] = insert_results[insert_index] + insert_index += 1 + + def _determine_limit_clause(self, total_num_records): + """Determines the LIMIT clause based on the retrieval mode.""" + if ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.ALL + ): + return None + elif ( + self.select_operation_executor.retrieval_mode + == SelectRecordRetrievalMode.MATCH + ): + return total_num_records + def _execute_select_query(self, select_query: str, query_fields: List[str]): """Executes the select Bulk API query, retrieves results in JSON, and converts to CSV format if needed.""" self.batch_id = self.bulk.query(self.job_id, select_query) @@ -660,6 +726,7 @@ def __init__( selection_filter=None, selection_priority_fields=None, content_type=None, + threshold=None, ): super().__init__( sobject=sobject, @@ -691,6 +758,7 @@ def __init__( priority_fields=selection_priority_fields, fields=fields ) self.content_type = content_type + self.threshold = threshold def _record_to_json(self, rec): result = dict(zip(self.fields, rec)) @@ -804,74 +872,126 @@ def select_records(self, records): self.results = [] query_records = [] + # Create a copy of the generator using tee records, records_copy = tee(records) + # Count total number of records to fetch using the copy total_num_records = sum(1 for _ in records_copy) # Set LIMIT condition + limit_clause = self._determine_limit_clause(total_num_records) + + # Generate the SOQL query based on the selection strategy + select_query, query_fields = ( + self.select_operation_executor.select_generate_query( + sobject=self.sobject, + fields=self.fields, + user_filter=self.selection_filter or None, + limit=limit_clause, + offset=None, + ) + ) + + # Execute the query and gather the records + query_records = self._execute_soql_query(select_query, query_fields) + + # Post-process the query results for this batch + selected_records, insert_records, error_message = ( + 
self.select_operation_executor.select_post_process( + load_records=records, + query_records=query_records, + fields=self.fields, + num_records=total_num_records, + sobject=self.sobject, + weights=self.weights, + threshold=self.threshold, + ) + ) + + # Log the number of selected and prepared for insertion records + num_selected = sum(1 for record in selected_records if record) + num_prepared = len(insert_records) if insert_records else 0 + + self.logger.info( + f"{num_selected} records selected." + + ( + f" {num_prepared} records prepared for insertion." + if num_prepared > 0 + else "" + ) + ) + + if insert_records: + self._process_insert_records(insert_records, selected_records) + + if not error_message: + # Add selected records from this batch to the overall results + self.results.extend(selected_records) + + # Update the job result based on the overall selection outcome + self._update_job_result(error_message) + + def _determine_limit_clause(self, total_num_records): + """Determines the LIMIT clause based on the retrieval mode.""" if ( self.select_operation_executor.retrieval_mode == SelectRecordRetrievalMode.ALL ): - limit_clause = None + return None elif ( self.select_operation_executor.retrieval_mode == SelectRecordRetrievalMode.MATCH ): - limit_clause = total_num_records + return total_num_records - # Generate the SOQL query based on the selection strategy - ( - select_query, - query_fields, - ) = self.select_operation_executor.select_generate_query( - sobject=self.sobject, - fields=self.fields, - user_filter=self.selection_filter if self.selection_filter else None, - limit=limit_clause, - offset=None, - ) - - # Handle the case where self.selection_query is None (and hence user_query is also None) + def _execute_soql_query(self, select_query, query_fields): + """Executes the SOQL query and returns the flattened records.""" + query_records = [] response = self.sf.restful( requests.utils.requote_uri(f"query/?q={select_query}"), method="GET" ) - # Convert each record to a flat row - for record in response["records"]: - flat_record = flatten_record(record, query_fields) - query_records.append(flat_record) - while True: - if not response["done"]: - response = self.sf.query_more( - response["nextRecordsUrl"], identifier_is_url=True - ) - for record in response["records"]: - flat_record = flatten_record(record, query_fields) - query_records.append(flat_record) - else: - break + query_records.extend(self._flatten_response_records(response, query_fields)) - # Post-process the query results for this batch - ( - selected_records, - error_message, - ) = self.select_operation_executor.select_post_process( - load_records=records, - query_records=query_records, - num_records=total_num_records, + while not response["done"]: + response = self.sf.query_more( + response["nextRecordsUrl"], identifier_is_url=True + ) + query_records.extend(self._flatten_response_records(response, query_fields)) + + return query_records + + def _flatten_response_records(self, response, query_fields): + """Flattens the response records and returns them as a list.""" + return [flatten_record(record, query_fields) for record in response["records"]] + + def _process_insert_records(self, insert_records, selected_records): + """Processes and inserts records if necessary.""" + insert_fields, _ = split_and_filter_fields(fields=self.fields) + insert_step = RestApiDmlOperation( sobject=self.sobject, - weights=self.weights, + operation=DataOperationType.INSERT, + api_options=self.api_options, + context=self.context, + 
fields=insert_fields, ) - if not error_message: - # Add selected records from this batch to the overall results - self.results.extend(selected_records) - - # Update the job result based on the overall selection outcome + insert_step.start() + insert_step.load_records(insert_records) + insert_step.end() + insert_results = insert_step.results + + insert_index = 0 + for idx, record in enumerate(selected_records): + if record is None: + selected_records[idx] = insert_results[insert_index] + insert_index += 1 + + def _update_job_result(self, error_message): + """Updates the job result based on the selection outcome.""" self.job_result = DataOperationJobResult( status=( DataOperationStatus.SUCCESS - if len(self.results) # Check the overall results length + if len(self.results) else DataOperationStatus.JOB_FAILURE ), job_errors=[error_message] if error_message else [], @@ -964,6 +1084,7 @@ def get_dml_operation( selection_filter: Union[str, None] = None, selection_priority_fields: Union[dict, None] = None, content_type: Union[str, None] = None, + threshold: Union[float, None] = None, ) -> BaseDmlOperation: """Create an appropriate DmlOperation instance for the given parameters, selecting between REST and Bulk APIs based upon volume (Bulk used at volumes over 2000 records, @@ -1001,6 +1122,7 @@ def get_dml_operation( selection_filter=selection_filter, selection_priority_fields=selection_priority_fields, content_type=content_type, + threshold=threshold, ) diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml new file mode 100644 index 0000000000..1bad614b1d --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_number.yml @@ -0,0 +1,21 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + threshold: 1.5 + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml new file mode 100644 index 0000000000..71958848c5 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__invalid_strategy.yml @@ -0,0 +1,21 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: standard + filter: WHEN Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + threshold: 0.5 + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml new file mode 100644 index 0000000000..2ff1482f3d --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/mapping_select_invalid_threshold__non_float.yml @@ -0,0 +1,21 @@ +# Select Mapping File for load +Select Accounts: + api: bulk + action: select + sf_object: Account + table: accounts + select_options: + strategy: similarity + filter: WHEN 
Name in ('Sample Account') + priority_fields: + Name: name + AccountNumber: account_number + threshold: invalid threshold + fields: + Name: name + AccountNumber: account_number + Description: description + lookups: + ParentId: + key_field: parent_id + table: accounts diff --git a/cumulusci/tasks/bulkdata/tests/test_load.py b/cumulusci/tasks/bulkdata/tests/test_load.py index 9fb6ea1d87..8fb8ee0756 100644 --- a/cumulusci/tasks/bulkdata/tests/test_load.py +++ b/cumulusci/tasks/bulkdata/tests/test_load.py @@ -835,13 +835,12 @@ def test_process_lookup_fields_polymorphic(self): "Subject", "Who.Contact.FirstName", "Who.Contact.LastName", - "Who.Contact.AccountId", "Who.Lead.LastName", + "WhoId", ] expected_priority_fields_keys = { "Who.Contact.FirstName", "Who.Contact.LastName", - "Who.Contact.AccountId", "Who.Lead.LastName", } with mock.patch( @@ -886,6 +885,7 @@ def test_process_lookup_fields_non_polymorphic(self): "LastName", "Account.Name", "Account.AccountNumber", + "AccountId", ] expected_priority_fields_keys = { "FirstName", @@ -989,7 +989,7 @@ def test_query_db__joins_select_lookups(self): sql_path=Path(__file__).parent / "test_query_db_joins_lookups.sql", mapping=Path(__file__).parent / "test_query_db_joins_lookups_select.yml", mapping_step_name="Select Event", - expected='''SELECT events.id AS events_id, events."subject" AS "events_subject", "whoid_contacts_alias"."firstname" AS "whoid_contacts_alias_firstname", "whoid_contacts_alias"."lastname" AS "whoid_contacts_alias_lastname", '' AS "whoid_contacts_alias_accountid", "whoid_leads_alias"."lastname" AS "whoid_leads_alias_lastname" from events LEFT OUTER JOIN contacts AS "whoid_contacts_alias" ON "whoid_contacts_alias".id=events."whoid" LEFT OUTER JOIN leads AS "whoid_leads_alias" ON "whoid_leads_alias".id=events."whoid" ORDER BY events."whoid"''', + expected='''SELECT events.id AS events_id, events."subject" AS "events_subject", "whoid_contacts_alias"."firstname" AS "whoid_contacts_alias_firstname", "whoid_contacts_alias"."lastname" AS "whoid_contacts_alias_lastname", "whoid_leads_alias"."lastname" AS "whoid_leads_alias_lastname", cumulusci_id_table_1.sf_id AS cumulusci_id_table_1_sf_id FROM events LEFT OUTER JOIN contacts AS "whoid_contacts_alias" ON "whoid_contacts_alias".id=events."whoid" LEFT OUTER JOIN leads AS "whoid_leads_alias" ON "whoid_leads_alias".id=events."whoid" LEFT OUTER JOIN cumulusci_id_table AS cumulusci_id_table_1 ON cumulusci_id_table_1.id=? 
|| cast(events."whoid" as varchar) ORDER BY events."whoid"''', ) def test_query_db__joins_polymorphic_lookups(self): diff --git a/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py b/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py index ae9fe91686..8ce38ff5a8 100644 --- a/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py +++ b/cumulusci/tasks/bulkdata/tests/test_mapping_parser.py @@ -231,6 +231,35 @@ def test_select_options__invalid_strategy(self): parse_from_yaml(base_path) assert "Invalid strategy value: invalid_strategy" in str(e.value) + def test_select_options__invalid_threshold__non_float(self): + base_path = ( + Path(__file__).parent / "mapping_select_invalid_threshold__non_float.yml" + ) + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + assert "value is not a valid float" in str(e.value) + + def test_select_options__invalid_threshold__invalid_strategy(self): + base_path = ( + Path(__file__).parent + / "mapping_select_invalid_threshold__invalid_strategy.yml" + ) + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + assert ( + "If a threshold is specified, the strategy must be set to 'similarity'." + in str(e.value) + ) + + def test_select_options__invalid_threshold__invalid_number(self): + base_path = ( + Path(__file__).parent + / "mapping_select_invalid_threshold__invalid_number.yml" + ) + with pytest.raises(ValueError) as e: + parse_from_yaml(base_path) + assert "Threshold must be between 0 and 1, got 1.5" in str(e.value) + def test_select_options__missing_priority_fields(self): base_path = Path(__file__).parent / "mapping_select_missing_priority_fields.yml" with pytest.raises(ValueError) as e: diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index 4969722c6e..a0b5a3fcad 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -10,7 +10,9 @@ determine_field_types, find_closest_record, levenshtein_distance, + reorder_records, replace_empty_strings_with_missing, + split_and_filter_fields, vectorize_records, ) @@ -100,8 +102,14 @@ def test_standard_post_process_with_records(): records = [["001"], ["002"], ["003"]] num_records = 3 sobject = "Contact" - selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject, weights=[] + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[], + fields=[], + threshold=None, ) assert error_message is None @@ -116,8 +124,14 @@ def test_standard_post_process_with_fewer_records(): records = [["001"]] num_records = 3 sobject = "Opportunity" - selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject, weights=[] + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[], + fields=[], + threshold=None, ) assert error_message is None @@ -133,8 +147,14 @@ def test_standard_post_process_with_no_records(): records = [] num_records = 2 sobject = "Lead" - selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject, weights=[] + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + 
sobject=sobject, + weights=[], + fields=[], + threshold=None, ) assert selected_records == [] @@ -147,8 +167,14 @@ def test_random_post_process_with_records(): records = [["001"], ["002"], ["003"]] num_records = 3 sobject = "Contact" - selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject, weights=[] + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[], + fields=[], + threshold=None, ) assert error_message is None @@ -162,8 +188,14 @@ def test_random_post_process_with_no_records(): records = [] num_records = 2 sobject = "Lead" - selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject, weights=[] + selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[], + fields=[], + threshold=None, ) assert selected_records == [] @@ -279,7 +311,7 @@ def test_find_closest_record_different_weights(): weights = [2.0, 0.5] # With different weights, the first field will have more impact - closest_record = find_closest_record(load_record, query_records, weights) + closest_record, _ = find_closest_record(load_record, query_records, weights) assert closest_record == [ "record1", "hello", @@ -296,7 +328,7 @@ def test_find_closest_record_basic(): ] weights = [1.0, 1.0] - closest_record = find_closest_record(load_record, query_records, weights) + closest_record, _ = find_closest_record(load_record, query_records, weights) assert closest_record == [ "record1", "hello", @@ -313,7 +345,7 @@ def test_find_closest_record_multiple_matches(): ] weights = [1.0, 1.0] - closest_record = find_closest_record(load_record, query_records, weights) + closest_record, _ = find_closest_record(load_record, query_records, weights) assert closest_record == [ "record2", "cat", @@ -327,25 +359,29 @@ def test_similarity_post_process_with_records(): sobject = "Contact" load_records = [["Tom Cruise", "62", "Actor"]] query_records = [ - ["001", "Tom Hanks", "62", "Actor"], + ["001", "Bob Hanks", "62", "Actor"], ["002", "Tom Cruise", "63", "Actor"], # Slight difference ["003", "Jennifer Aniston", "30", "Actress"], ] weights = [1.0, 1.0, 1.0] # Adjust weights to match your data structure - selected_records, error_message = select_operator.select_post_process( - load_records, query_records, num_records, sobject, weights + selected_records, _, error_message = select_operator.select_post_process( + load_records=load_records, + query_records=query_records, + num_records=num_records, + sobject=sobject, + weights=weights, + fields=["Name", "Age", "Occupation"], + threshold=None, ) - # selected_records, error_message = select_operator.select_post_process( - # load_records, query_records, num_records, sobject - # ) - assert error_message is None assert len(selected_records) == num_records assert all(record["success"] for record in selected_records) assert all(record["created"] is False for record in selected_records) + x = [record["id"] for record in selected_records] + print(x) assert all(record["id"] in ["002"] for record in selected_records) @@ -354,8 +390,14 @@ def test_similarity_post_process_with_no_records(): records = [] num_records = 2 sobject = "Lead" - selected_records, error_message = select_operator.select_post_process( - None, records, num_records, sobject, weights=[1, 1, 1] + 
selected_records, _, error_message = select_operator.select_post_process( + load_records=None, + query_records=records, + num_records=num_records, + sobject=sobject, + weights=[1, 1, 1], + fields=[], + threshold=None, ) assert selected_records == [] @@ -369,7 +411,7 @@ def test_calculate_levenshtein_distance_basic(): # Expected distance based on simple Levenshtein distances # Levenshtein("hello", "hullo") = 1, Levenshtein("world", "word") = 1 - expected_distance = (1 * 1.0 + 1 * 1.0) / 2 # Averaged over two fields + expected_distance = (1 / 5 * 1.0 + 1 / 5 * 1.0) / 2 # Averaged over two fields result = calculate_levenshtein_distance(record1, record2, weights) assert result == pytest.approx( @@ -383,7 +425,7 @@ def test_calculate_levenshtein_distance_basic(): # Expected distance based on simple Levenshtein distances # Levenshtein("hello", "hullo") = 1, Levenshtein("", "") = 0 - expected_distance = (1 * 1.0 + 0 * 1.0) / 2 # Averaged over two fields + expected_distance = (1 / 5 * 1.0 + 0 * 1.0) / 2 # Averaged over two fields result = calculate_levenshtein_distance(record1, record2, weights) assert result == pytest.approx( @@ -397,7 +439,9 @@ def test_calculate_levenshtein_distance_basic(): # Expected distance based on simple Levenshtein distances # Levenshtein("hello", "hullo") = 1, Levenshtein("world", "") = 5 - expected_distance = (1 * 1.0 + 5 * 0.05 * 1.0) / 2 # Averaged over two fields + expected_distance = ( + 1 / 5 * 1.0 + 5 / 5 * 0.05 * 1.0 + ) / 2 # Averaged over two fields result = calculate_levenshtein_distance(record1, record2, weights) assert result == pytest.approx( @@ -411,7 +455,9 @@ def test_calculate_levenshtein_distance_weighted(): weights = [2.0, 0.5] # Levenshtein("cat", "bat") = 1, Levenshtein("dog", "fog") = 1 - expected_distance = (1 * 2.0 + 1 * 0.5) / 2 # Weighted average over two fields + expected_distance = ( + 1 / 3 * 2.0 + 1 / 3 * 0.5 + ) / 2.5 # Weighted average over two fields result = calculate_levenshtein_distance(record1, record2, weights) assert result == pytest.approx( @@ -571,7 +617,13 @@ def test_annoy_post_process(): query_records = [["q1", "Alice", "Engineer"], ["q2", "Charlie", "Artist"]] weights = [1.0, 1.0, 1.0] # Example weights - closest_records, error = annoy_post_process(load_records, query_records, weights) + closest_records, insert_records = annoy_post_process( + load_records=load_records, + query_records=query_records, + similarity_weights=weights, + all_fields=["Name", "Occupation"], + threshold=None, + ) # Assert the closest records assert ( @@ -582,7 +634,97 @@ def test_annoy_post_process(): ) # The first query record should match the first load record # No errors expected - assert error is None + assert not insert_records + + +def test_annoy_post_process__insert_records(): + # Test data + load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]] + query_records = [["q1", "Alice", "Engineer"], ["q2", "Charlie", "Artist"]] + weights = [1.0, 1.0, 1.0] # Example weights + threshold = 0.3 + + closest_records, insert_records = annoy_post_process( + load_records=load_records, + query_records=query_records, + similarity_weights=weights, + all_fields=["Name", "Occupation"], + threshold=threshold, + ) + + # Assert the closest records + assert len(closest_records) == 2 # We expect two results (one record and one None) + assert ( + closest_records[0]["id"] == "q1" + ) # The first query record should match the first load record + assert closest_records[1] is None # The second query record should be None + assert insert_records[0] == [ + "Bob", + 
"Doctor", + ] # The first insert record should match the second load record + + +def test_annoy_post_process__no_query_records(): + # Test data + load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]] + query_records = [] + weights = [1.0, 1.0, 1.0] # Example weights + threshold = 0.3 + + closest_records, insert_records = annoy_post_process( + load_records=load_records, + query_records=query_records, + similarity_weights=weights, + all_fields=["Name", "Occupation"], + threshold=threshold, + ) + + # Assert the closest records + assert len(closest_records) == 2 # We expect two results (both None) + assert all(rec is None for rec in closest_records) # Both should be None + assert insert_records[0] == [ + "Alice", + "Engineer", + ] # The first insert record should match the second load record + assert insert_records[1] == [ + "Bob", + "Doctor", + ] # The first insert record should match the second load record + + +def test_annoy_post_process__insert_records_with_polymorphic_fields(): + # Test data + load_records = [ + ["Alice", "Engineer", "Alice_Contact", "abcd1234"], + ["Bob", "Doctor", "Bob_Contact", "qwer1234"], + ] + query_records = [ + ["q1", "Alice", "Engineer", "Alice_Contact"], + ["q2", "Charlie", "Artist", "Charlie_Contact"], + ] + weights = [1.0, 1.0, 1.0, 1.0] # Example weights + threshold = 0.3 + all_fields = ["Name", "Occupation", "Contact.Name", "ContactId"] + + closest_records, insert_records = annoy_post_process( + load_records=load_records, + query_records=query_records, + similarity_weights=weights, + all_fields=all_fields, + threshold=threshold, + ) + + # Assert the closest records + assert len(closest_records) == 2 # We expect two results (one record and one None) + assert ( + closest_records[0]["id"] == "q1" + ) # The first query record should match the first load record + assert closest_records[1] is None # The second query record should be None + assert insert_records[0] == [ + "Bob", + "Doctor", + "qwer1234", + ] # The first insert record should match the second load record def test_single_record_match_annoy_post_process(): @@ -591,12 +733,18 @@ def test_single_record_match_annoy_post_process(): query_records = [["q1", "Alice", "Engineer"]] weights = [1.0, 1.0, 1.0] - closest_records, error = annoy_post_process(load_records, query_records, weights) + closest_records, insert_records = annoy_post_process( + load_records=load_records, + query_records=query_records, + similarity_weights=weights, + all_fields=["Name", "Occupation"], + threshold=None, + ) # Both the load records should be matched with the only query record we have assert len(closest_records) == 2 assert closest_records[0]["id"] == "q1" - assert error is None + assert not insert_records @pytest.mark.parametrize( @@ -653,3 +801,206 @@ def test_add_limit_offset_to_user_filter( ): result = add_limit_offset_to_user_filter(filter_clause, limit_clause, offset_clause) assert result.strip() == expected.strip() + + +def test_reorder_records_basic_reordering(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["job", "name"] + + expected = [ + ["Engineer", "Alice"], + ["Designer", "Bob"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_partial_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["age"] + + expected = [ + [30], + [25], + ] + result = 
reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_missing_fields_in_new_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["nonexistent", "job"] + + expected = [ + ["Engineer"], + ["Designer"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_empty_records(): + records = [] + original_fields = ["name", "age", "job"] + new_fields = ["job", "name"] + + expected = [] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_empty_new_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = [] + + expected = [ + [], + [], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_empty_original_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = [] + new_fields = ["job", "name"] + + with pytest.raises(KeyError): + reorder_records(records, original_fields, new_fields) + + +def test_reorder_records_no_common_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["nonexistent_field"] + + expected = [ + [], + [], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_duplicate_fields_in_new_fields(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["job", "job", "name"] + + expected = [ + ["Engineer", "Engineer", "Alice"], + ["Designer", "Designer", "Bob"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_reorder_records_all_fields_in_order(): + records = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + original_fields = ["name", "age", "job"] + new_fields = ["name", "age", "job"] + + expected = [ + ["Alice", 30, "Engineer"], + ["Bob", 25, "Designer"], + ] + result = reorder_records(records, original_fields, new_fields) + assert result == expected + + +def test_split_and_filter_fields_basic_case(): + fields = [ + "Account.Name", + "Account.Industry", + "Contact.Name", + "AccountId", + "ContactId", + "CreatedDate", + ] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["AccountId", "ContactId", "CreatedDate"] + assert select_fields == [ + "Account.Name", + "Account.Industry", + "Contact.Name", + "CreatedDate", + ] + + +def test_split_and_filter_fields_all_non_lookup_fields(): + fields = ["Name", "CreatedDate"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["Name", "CreatedDate"] + assert select_fields == fields + + +def test_split_and_filter_fields_all_lookup_fields(): + fields = ["Account.Name", "Account.Industry", "Contact.Name"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == [] + assert select_fields == fields + + +def test_split_and_filter_fields_empty_fields(): + fields = [] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == [] + assert select_fields == [] + + +def test_split_and_filter_fields_single_non_lookup_field(): + fields = 
["Id"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["Id"] + assert select_fields == ["Id"] + + +def test_split_and_filter_fields_single_lookup_field(): + fields = ["Account.Name"] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == [] + assert select_fields == ["Account.Name"] + + +def test_split_and_filter_fields_multiple_unique_lookups(): + fields = [ + "Account.Name", + "Account.Industry", + "Contact.Email", + "Contact.Phone", + "Id", + ] + load_fields, select_fields = split_and_filter_fields(fields) + assert load_fields == ["Id"] + assert ( + select_fields == fields + ) # No filtering applied since all components are unique diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index bd059b9bbf..3d797df8bf 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -909,6 +909,7 @@ def test_select_records_similarity_strategy_parent_level_records__polymorphic( "Who.Contact.Email", "Who.Lead.Name", "Who.Lead.Company", + "WhoId", ], selection_strategy=SelectStrategy.SIMILARITY, ) @@ -921,15 +922,22 @@ def test_select_records_similarity_strategy_parent_level_records__polymorphic( download_mock.return_value = io.StringIO( """[ - {"Id": "003000000000001", "Subject": "Sample Event 1", "Who":{ "attributes": {"type": "Contact"}, "Name": "Sample Contact", "Email": "contact@example.com"}}, - { "Id": "003000000000002", "Subject": "Sample Event 2", "Who":{ "attributes": {"type": "Lead"}, "Name": "Sample Lead", "Company": "Salesforce"}} + {"Id": "003000000000001", "Subject": "Sample Event 1", "Who":{ "attributes": {"type": "Contact"}, "Id": "abcd1234", "Name": "Sample Contact", "Email": "contact@example.com"}}, + { "Id": "003000000000002", "Subject": "Sample Event 2", "Who":{ "attributes": {"type": "Lead"}, "Id": "qwer1234", "Name": "Sample Lead", "Company": "Salesforce"}} ]""" ) records = iter( [ - ["Sample Event 1", "Sample Contact", "contact@example.com", "", ""], - ["Sample Event 2", "", "", "Sample Lead", "Salesforce"], + [ + "Sample Event 1", + "Sample Contact", + "contact@example.com", + "", + "", + "lkjh1234", + ], + ["Sample Event 2", "", "", "Sample Lead", "Salesforce", "poiu1234"], ] ) step.start() @@ -960,7 +968,7 @@ def test_select_records_similarity_strategy_parent_level_records__non_polymorphi operation=DataOperationType.QUERY, api_options={"batch_size": 10}, context=context, - fields=["Name", "Account.Name", "Account.AccountNumber"], + fields=["Name", "Account.Name", "Account.AccountNumber", "AccountId"], selection_strategy=SelectStrategy.SIMILARITY, ) @@ -972,15 +980,15 @@ def test_select_records_similarity_strategy_parent_level_records__non_polymorphi download_mock.return_value = io.StringIO( """[ - {"Id": "003000000000001", "Name": "Sample Contact 1", "Account":{ "attributes": {"type": "Account"}, "Name": "Sample Account", "AccountNumber": 123456}}, + {"Id": "003000000000001", "Name": "Sample Contact 1", "Account":{ "attributes": {"type": "Account"}, "Id": "abcd1234", "Name": "Sample Account", "AccountNumber": 123456}}, { "Id": "003000000000002", "Subject": "Sample Contact 2", "Account": null} ]""" ) records = iter( [ - ["Sample Contact 3", "Sample Account", "123456"], - ["Sample Contact 4", "", ""], + ["Sample Contact 3", "Sample Account", "123456", "poiu1234"], + ["Sample Contact 4", "", "", ""], ] ) step.start() @@ -1009,7 +1017,13 @@ def test_select_records_similarity_strategy_priority_fields(self, 
download_mock) operation=DataOperationType.QUERY, api_options={"batch_size": 10}, context=context, - fields=["Name", "Email", "Account.Name", "Account.AccountNumber"], + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], selection_strategy=SelectStrategy.SIMILARITY, selection_priority_fields={"Name": "Name", "Email": "Email"}, ) @@ -1019,7 +1033,13 @@ def test_select_records_similarity_strategy_priority_fields(self, download_mock) operation=DataOperationType.QUERY, api_options={"batch_size": 10}, context=context, - fields=["Name", "Email", "Account.Name", "Account.AccountNumber"], + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], selection_strategy=SelectStrategy.SIMILARITY, selection_priority_fields={ "Account.Name": "Account.Name", @@ -1044,6 +1064,7 @@ def test_select_records_similarity_strategy_priority_fields(self, download_mock) "Email": "bob@yahoo.org", "Account": { "attributes": {"type": "Account"}, + "Id": "abcd1234", "Name": "Jawad TP", "AccountNumber": 567890, }, @@ -1054,6 +1075,7 @@ def test_select_records_similarity_strategy_priority_fields(self, download_mock) "Email": "tom@exmaple.com", "Account": { "attributes": {"type": "Account"}, + "Id": "qwer1234", "Name": "Aditya B", "AccountNumber": 123456, }, @@ -1067,7 +1089,7 @@ def test_select_records_similarity_strategy_priority_fields(self, download_mock) records = iter( [ - ["Bob The Builder", "bob@yahoo.org", "Aditya B", "123456"], + ["Bob The Builder", "bob@yahoo.org", "Aditya B", "123456", "poiu1234"], ] ) records_1, records_2 = tee(records) @@ -2041,6 +2063,7 @@ def test_select_records_similarity_strategy_parent_level_records__polymorphic(se "Who.Contact.Email", "Who.Lead.Name", "Who.Lead.Company", + "WhoId", ], selection_strategy=SelectStrategy.SIMILARITY, ) @@ -2054,6 +2077,7 @@ def test_select_records_similarity_strategy_parent_level_records__polymorphic(se "Subject": "Sample Event 1", "Who": { "attributes": {"type": "Contact"}, + "Id": "abcd1234", "Name": "Sample Contact", "Email": "contact@example.com", }, @@ -2063,6 +2087,7 @@ def test_select_records_similarity_strategy_parent_level_records__polymorphic(se "Subject": "Sample Event 2", "Who": { "attributes": {"type": "Lead"}, + "Id": "qwer1234", "Name": "Sample Lead", "Company": "Salesforce", }, @@ -2075,8 +2100,15 @@ def test_select_records_similarity_strategy_parent_level_records__polymorphic(se records = iter( [ - ["Sample Event 1", "Sample Contact", "contact@example.com", "", ""], - ["Sample Event 2", "", "", "Sample Lead", "Salesforce"], + [ + "Sample Event 1", + "Sample Contact", + "contact@example.com", + "", + "", + "poiu1234", + ], + ["Sample Event 2", "", "", "Sample Lead", "Salesforce", "lkjh1234"], ] ) step.start() @@ -2132,7 +2164,7 @@ def test_select_records_similarity_strategy_parent_level_records__non_polymorphi operation=DataOperationType.QUERY, api_options={"batch_size": 10}, context=task, - fields=["Name", "Account.Name", "Account.AccountNumber"], + fields=["Name", "Account.Name", "Account.AccountNumber", "AccountId"], selection_strategy=SelectStrategy.SIMILARITY, ) @@ -2145,6 +2177,7 @@ def test_select_records_similarity_strategy_parent_level_records__non_polymorphi "Name": "Sample Contact 1", "Account": { "attributes": {"type": "Account"}, + "Id": "abcd1234", "Name": "Sample Account", "AccountNumber": 123456, }, @@ -2162,8 +2195,8 @@ def test_select_records_similarity_strategy_parent_level_records__non_polymorphi records = iter( [ - ["Sample Contact 3", 
"Sample Account", "123456"], - ["Sample Contact 4", "", ""], + ["Sample Contact 3", "Sample Account", "123456", "poiu1234"], + ["Sample Contact 4", "", "", ""], ] ) step.start() @@ -2229,7 +2262,13 @@ def test_select_records_similarity_strategy_priority_fields(self): operation=DataOperationType.QUERY, api_options={"batch_size": 10}, context=task_1, - fields=["Name", "Email", "Account.Name", "Account.AccountNumber"], + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], selection_strategy=SelectStrategy.SIMILARITY, selection_priority_fields={"Name": "Name", "Email": "Email"}, ) @@ -2239,7 +2278,13 @@ def test_select_records_similarity_strategy_priority_fields(self): operation=DataOperationType.QUERY, api_options={"batch_size": 10}, context=task_2, - fields=["Name", "Email", "Account.Name", "Account.AccountNumber"], + fields=[ + "Name", + "Email", + "Account.Name", + "Account.AccountNumber", + "AccountId", + ], selection_strategy=SelectStrategy.SIMILARITY, selection_priority_fields={ "Account.Name": "Account.Name", @@ -2256,6 +2301,7 @@ def test_select_records_similarity_strategy_priority_fields(self): "Email": "bob@yahoo.org", "Account": { "attributes": {"type": "Account"}, + "Id": "abcd1234", "Name": "Jawad TP", "AccountNumber": 567890, }, @@ -2266,6 +2312,7 @@ def test_select_records_similarity_strategy_priority_fields(self): "Email": "tom@exmaple.com", "Account": { "attributes": {"type": "Account"}, + "Id": "qwer1234", "Name": "Aditya B", "AccountNumber": 123456, }, @@ -2280,7 +2327,7 @@ def test_select_records_similarity_strategy_priority_fields(self): records = iter( [ - ["Bob The Builder", "bob@yahoo.org", "Aditya B", "123456"], + ["Bob The Builder", "bob@yahoo.org", "Aditya B", "123456", "poiu1234"], ] ) records_1, records_2 = tee(records) @@ -2803,6 +2850,7 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): selection_filter=None, selection_priority_fields=None, content_type=None, + threshold=None, ) op = get_dml_operation( @@ -2828,6 +2876,7 @@ def test_get_dml_operation(self, rest_dml, bulk_dml): selection_filter=None, selection_priority_fields=None, content_type=None, + threshold=None, ) @mock.patch("cumulusci.tasks.bulkdata.step.BulkApiDmlOperation") From f4fb69603da4285fe3d9c6be6a991b2bf664d7f7 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 19 Nov 2024 19:01:08 +0530 Subject: [PATCH 43/65] Add new imports to requirements.txt --- .pre-commit-config.yaml | 6 +- cumulusci/tasks/bulkdata/tests/test_step.py | 532 ++++++++++++++++++++ pyproject.toml | 4 + requirements/dev.txt | 36 +- requirements/prod.txt | 34 +- 5 files changed, 590 insertions(+), 22 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b1a928eafd..62af507949 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ default_language_version: python: python3 repos: - repo: https://github.com/ambv/black - rev: 24.10.0 + rev: 22.3.0 hooks: - id: black - repo: https://github.com/pre-commit/pre-commit-hooks @@ -18,12 +18,12 @@ repos: - id: rst-linter exclude: "docs" - repo: https://github.com/pycqa/isort - rev: 5.13.2 + rev: 5.12.0 hooks: - id: isort args: ["--profile", "black", "--filter-files"] - repo: https://github.com/pre-commit/mirrors-prettier - rev: v4.0.0-alpha.8 + rev: v2.5.1 hooks: - id: prettier - repo: local diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index 3d797df8bf..e94e91f226 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py 
+++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -1121,6 +1121,304 @@ def test_select_records_similarity_strategy_priority_fields(self, download_mock) id="003000000000002", success=True, error="", created=False ) + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_process_insert_records_success(self, download_mock): + # Mock context and insert records + context = mock.Mock() + insert_records = iter([["John", "Doe"], ["Jane", "Smith"]]) + selected_records = [None, None] + + # Mock insert fields splitting + insert_fields = ["FirstName", "LastName"] + with mock.patch( + "cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ) as split_mock: + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=["FirstName", "LastName"], + ) + + # Mock Bulk API + step.bulk.endpoint = "https://test" + step.bulk.create_insert_job.return_value = "JOB" + step.bulk.get_insert_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with successful results + download_mock.return_value = io.StringIO( + "Id,Success,Created\n0011k00003E8xAaAAI,true,true\n0011k00003E8xAbAAJ,true,true\n" + ) + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + step._process_insert_records(insert_records, selected_records) + + # Assertions for split fields and sub-operation + split_mock.assert_called_once_with(fields=["FirstName", "LastName"]) + insert_step.start.assert_called_once() + insert_step.load_records.assert_called_once_with(insert_records) + insert_step.end.assert_called_once() + + # Validate the download file interactions + download_mock.assert_called_once_with( + "https://test/job/JOB/batch/BATCH1/result", insert_step.bulk + ) + + # Validate that selected_records is updated with insert results + assert selected_records == [ + {"id": "0011k00003E8xAaAAI", "success": True, "created": True}, + {"id": "0011k00003E8xAbAAJ", "success": True, "created": True}, + ] + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_process_insert_records_failure(self, download_mock): + # Mock context and insert records + context = mock.Mock() + insert_records = iter([["John", "Doe"], ["Jane", "Smith"]]) + selected_records = [None, None] + + # Mock insert fields splitting + insert_fields = ["FirstName", "LastName"] + with mock.patch( + "cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ): + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10}, + context=context, + fields=["FirstName", "LastName"], + ) + + # Mock failure during results download + download_mock.side_effect = Exception("Download failed") + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + 
insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + with pytest.raises(BulkDataException) as excinfo: + step._process_insert_records(insert_records, selected_records) + + # Validate that the exception is raised with the correct message + assert "Failed to download results for batch BATCH1" in str( + excinfo.value + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy__insert_records(self, download_mock): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + # Add step with threshold + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + threshold=0.3, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + select_results = io.StringIO( + """[{"Id":"003000000000001", "Name":"Jawad", "Email":"mjawadtp@example.com"}]""" + ) + insert_results = io.StringIO( + "Id,Success,Created\n003000000000002,true,true\n003000000000003,true,true\n" + ) + download_mock.side_effect = [select_results, insert_results] + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=True + ) + ) + == 1 + ) + + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy__insert_records__no_select_records( + self, download_mock + ): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + # Add step with threshold + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Name", "Email"], + 
selection_strategy=SelectStrategy.SIMILARITY, + threshold=0.3, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + select_results = io.StringIO("""[]""") + insert_results = io.StringIO( + "Id,Success,Created\n003000000000001,true,true\n003000000000002,true,true\n003000000000003,true,true\n" + ) + download_mock.side_effect = [select_results, insert_results] + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Mock sub-operation for BulkApiDmlOperation + insert_step = mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=True + ) + ) + == 1 + ) + def test_batch(self): context = mock.Mock() @@ -2359,6 +2657,240 @@ def test_select_records_similarity_strategy_priority_fields(self): id="003000000000002", success=True, error="", created=False ) + @responses.activate + def test_process_insert_records_success(self): + # Mock describe calls + mock_describe_calls() + + # Create a task and mock project config + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + # Prepare inputs + insert_records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom Cruise", "tomcruise@example.com"], + ] + ) + selected_records = [None, None, None] + + # Mock fields splitting + insert_fields = ["Name", "Email"] + with mock.patch( + "cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ) as split_mock: + # Mock the instance of RestApiDmlOperation + mock_rest_api_dml_operation = mock.create_autospec( + RestApiDmlOperation, instance=True + ) + mock_rest_api_dml_operation.results = [ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + {"id": "003000000000003", "success": True}, + ] + + with mock.patch( + 
"cumulusci.tasks.bulkdata.step.RestApiDmlOperation", + return_value=mock_rest_api_dml_operation, + ): + # Call the function + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={"batch_size": 10}, + context=task, + fields=["Name", "Email"], + ) + step._process_insert_records(insert_records, selected_records) + + # Assert the mocked splitting is called + split_mock.assert_called_once_with(fields=["Name", "Email"]) + + # Validate that `selected_records` is updated correctly + assert selected_records == [ + {"id": "003000000000001", "success": True}, + {"id": "003000000000002", "success": True}, + {"id": "003000000000003", "success": True}, + ] + + # Validate the operation sequence + mock_rest_api_dml_operation.start.assert_called_once() + mock_rest_api_dml_operation.load_records.assert_called_once_with( + insert_records + ) + mock_rest_api_dml_operation.end.assert_called_once() + + @responses.activate + def test_process_insert_records_failure(self): + # Mock describe calls + mock_describe_calls() + + # Create a task and mock project config + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + # Prepare inputs + insert_records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ] + ) + selected_records = [None, None] + + # Mock fields splitting + insert_fields = ["Name", "Email"] + with mock.patch( + "cumulusci.tasks.bulkdata.step.split_and_filter_fields", + return_value=(insert_fields, None), + ) as split_mock: + # Mock the instance of RestApiDmlOperation + mock_rest_api_dml_operation = mock.create_autospec( + RestApiDmlOperation, instance=True + ) + mock_rest_api_dml_operation.results = ( + None # Simulate no results due to an exception + ) + + # Simulate an exception during processing results + mock_rest_api_dml_operation.load_records.side_effect = BulkDataException( + "Simulated failure" + ) + + with mock.patch( + "cumulusci.tasks.bulkdata.step.RestApiDmlOperation", + return_value=mock_rest_api_dml_operation, + ): + # Call the function and verify that it raises the expected exception + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.INSERT, + api_options={"batch_size": 10}, + context=task, + fields=["Name", "Email"], + ) + with pytest.raises(BulkDataException): + step._process_insert_records(insert_records, selected_records) + + # Assert the mocked splitting is called + split_mock.assert_called_once_with(fields=["Name", "Email"]) + + # Validate that `selected_records` remains unchanged + assert selected_records == [None, None] + + # Validate the operation sequence + mock_rest_api_dml_operation.start.assert_called_once() + mock_rest_api_dml_operation.load_records.assert_called_once_with( + insert_records + ) + mock_rest_api_dml_operation.end.assert_not_called() + + @responses.activate + def test_select_records_similarity_strategy__insert_records(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + # Create step with threshold + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10}, + context=task, + fields=["Name", "Email"], + 
selection_strategy=SelectStrategy.SIMILARITY, + threshold=0.3, + ) + + results_select_call = { + "records": [ + { + "Id": "003000000000001", + "Name": "Jawad", + "Email": "mjawadtp@example.com", + }, + ], + "done": True, + } + + results_insert_call = [ + {"id": "003000000000002", "success": True, "created": True}, + {"id": "003000000000003", "success": True, "created": True}, + ] + + step.sf.restful = mock.Mock( + side_effect=[results_select_call, results_insert_call] + ) + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom Cruise", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=True + ) + ) + == 1 + ) + @responses.activate def test_insert_dml_operation__boolean_conversion(self): mock_describe_calls() diff --git a/pyproject.toml b/pyproject.toml index b04f0b66c6..ae56e71afe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ classifiers = [ "Programming Language :: Python :: 3.12", ] dependencies = [ + "annoy", "click", "cryptography", "python-dateutil", @@ -38,6 +39,8 @@ dependencies = [ "lxml", "markdown-it-py==2.2.0", # resolve dependency conflict between prod/dev "MarkupSafe", + "numpy", + "pandas", "psutil", "pydantic<2", "PyJWT", @@ -55,6 +58,7 @@ dependencies = [ "sarge", "selenium<4", "simple-salesforce==1.11.4", + "scikit-learn", "snowfakery", "SQLAlchemy<2", "xmltodict", diff --git a/requirements/dev.txt b/requirements/dev.txt index 9a7d8b1fac..a90bf89cac 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,11 +1,13 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile --all-extras --output-file=requirements/dev.txt pyproject.toml # alabaster==0.7.13 # via sphinx +annoy==1.17.3 + # via cumulusci (pyproject.toml) appdirs==1.4.4 # via fs attrs==24.2.0 @@ -53,7 +55,6 @@ cryptography==43.0.1 # authlib # cumulusci (pyproject.toml) # pyjwt - # secretstorage defusedxml==0.7.1 # via cumulusci (pyproject.toml) distlib==0.3.8 @@ -89,9 +90,7 @@ furo==2023.3.27 github3-py==4.0.1 # via cumulusci (pyproject.toml) greenlet==3.0.3 - # via - # snowfakery - # sqlalchemy + # via snowfakery gvgen==1.0 # via snowfakery identify==2.6.0 @@ -125,6 +124,8 @@ jinja2==3.1.3 # myst-parser # snowfakery # sphinx +joblib==1.4.2 + # via scikit-learn jsonschema==4.23.0 # via cumulusci (pyproject.toml) jsonschema-specifications==2023.12.1 @@ -160,6 +161,12 @@ natsort==8.4.0 # via robotframework-pabot nodeenv==1.9.1 # via pre-commit +numpy==2.0.2 + # via + # cumulusci (pyproject.toml) + # pandas + # scikit-learn + # scipy packaging==24.1 # via # black @@ -167,6 +174,8 @@ packaging==24.1 # pytest # sphinx # tox +pandas==2.2.3 + # via cumulusci (pyproject.toml) pathspec==0.12.1 # via black pkgutil-resolve-name==1.3.10 @@ -226,11 +235,12 @@ python-dateutil==2.9.0.post0 # cumulusci 
(pyproject.toml) # faker # github3-py + # pandas # snowfakery pytz==2024.1 # via - # babel # cumulusci (pyproject.toml) + # pandas pyyaml==6.0.1 # via # cumulusci (pyproject.toml) @@ -289,6 +299,10 @@ sarge==0.1.7.post1 # via cumulusci (pyproject.toml) secretstorage==3.3.3 # via keyring +scikit-learn==1.5.2 + # via cumulusci (pyproject.toml) +scipy==1.13.1 + # via scikit-learn selenium==3.141.0 # via # cumulusci (pyproject.toml) @@ -335,6 +349,8 @@ sqlalchemy==1.4.52 # snowfakery testfixtures==8.3.0 # via cumulusci (pyproject.toml) +threadpoolctl==3.5.0 + # via scikit-learn tomli==2.0.1 # via # black @@ -351,10 +367,10 @@ types-pyyaml==6.0.12.20240808 typing-extensions==4.10.0 # via # black - # faker # pydantic - # rich # snowfakery +tzdata==2024.2 + # via pandas unicodecsv==0.14.1 # via salesforce-bulk uritemplate==4.1.1 @@ -381,9 +397,7 @@ xmltodict==0.13.0 yarl==1.9.11 # via vcrpy zipp==3.20.1 - # via - # importlib-metadata - # importlib-resources + # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/prod.txt b/requirements/prod.txt index 40ae1621a3..ab8c75581a 100644 --- a/requirements/prod.txt +++ b/requirements/prod.txt @@ -1,9 +1,11 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile --output-file=requirements/prod.txt pyproject.toml # +annoy==1.17.3 + # via cumulusci (pyproject.toml) appdirs==1.4.4 # via fs authlib==1.3.2 @@ -27,7 +29,6 @@ cryptography==43.0.1 # authlib # cumulusci (pyproject.toml) # pyjwt - # secretstorage defusedxml==0.7.1 # via cumulusci (pyproject.toml) docutils==0.16 @@ -47,9 +48,7 @@ fs==2.4.16 github3-py==4.0.1 # via cumulusci (pyproject.toml) greenlet==3.0.3 - # via - # snowfakery - # sqlalchemy + # via snowfakery gvgen==1.0 # via snowfakery idna==3.6 @@ -66,6 +65,8 @@ jinja2==3.1.3 # via # cumulusci (pyproject.toml) # snowfakery +joblib==1.4.2 + # via scikit-learn keyring==23.0.1 # via cumulusci (pyproject.toml) lxml==5.3.0 @@ -83,6 +84,14 @@ mdurl==0.1.2 # via markdown-it-py natsort==8.4.0 # via robotframework-pabot +numpy==2.0.2 + # via + # cumulusci (pyproject.toml) + # pandas + # scikit-learn + # scipy +pandas==2.2.3 + # via cumulusci (pyproject.toml) psutil==6.0.0 # via cumulusci (pyproject.toml) pycparser==2.22 @@ -104,9 +113,12 @@ python-dateutil==2.9.0.post0 # cumulusci (pyproject.toml) # faker # github3-py + # pandas # snowfakery pytz==2024.1 - # via cumulusci (pyproject.toml) + # via + # cumulusci (pyproject.toml) + # pandas pyyaml==6.0.1 # via # cumulusci (pyproject.toml) @@ -149,6 +161,10 @@ sarge==0.1.7.post1 # via cumulusci (pyproject.toml) secretstorage==3.3.3 # via keyring +scikit-learn==1.5.2 + # via cumulusci (pyproject.toml) +scipy==1.13.1 + # via scikit-learn selenium==3.141.0 # via # cumulusci (pyproject.toml) @@ -169,12 +185,14 @@ sqlalchemy==1.4.52 # via # cumulusci (pyproject.toml) # snowfakery +threadpoolctl==3.5.0 + # via scikit-learn typing-extensions==4.10.0 # via - # faker # pydantic - # rich # snowfakery +tzdata==2024.2 + # via pandas unicodecsv==0.14.1 # via salesforce-bulk uritemplate==4.1.1 From 44fff95d9abcb7de9d9fadf097e3aff396b9591d Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 19 Nov 2024 19:06:50 +0530 Subject: [PATCH 44/65] Add scikit-learn to pyproject.toml --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 6063c73cab..7dec9eedab 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ dependencies = [ "rst2ansi>=0.1.5", "salesforce-bulk", "sarge", + "scikit-learn", "selenium<4", "simple-salesforce==1.11.4", "snowfakery>=4.0.0", From db230ceb671500ec7956da4fbcc37ebd72c919cf Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 19 Nov 2024 19:34:39 +0530 Subject: [PATCH 45/65] Re-lint files --- cumulusci/tasks/bulkdata/mapping_parser.py | 6 ++-- cumulusci/tasks/bulkdata/step.py | 39 ++++++++++++---------- 2 files changed, 24 insertions(+), 21 deletions(-) diff --git a/cumulusci/tasks/bulkdata/mapping_parser.py b/cumulusci/tasks/bulkdata/mapping_parser.py index e630d564c6..59c7d630a2 100644 --- a/cumulusci/tasks/bulkdata/mapping_parser.py +++ b/cumulusci/tasks/bulkdata/mapping_parser.py @@ -91,9 +91,9 @@ class MappingStep(CCIDictModel): batch_size: int = None oid_as_pk: bool = False # this one should be discussed and probably deprecated record_type: Optional[str] = None # should be discussed and probably deprecated - bulk_mode: Optional[Literal["Serial", "Parallel"]] = ( - None # default should come from task options - ) + bulk_mode: Optional[ + Literal["Serial", "Parallel"] + ] = None # default should come from task options anchor_date: Optional[Union[str, date]] = None soql_filter: Optional[str] = None # soql_filter property select_options: Optional[SelectOptions] = Field( diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index b88fa8b100..b2a13bf966 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -883,30 +883,33 @@ def select_records(self, records): limit_clause = self._determine_limit_clause(total_num_records) # Generate the SOQL query based on the selection strategy - select_query, query_fields = ( - self.select_operation_executor.select_generate_query( - sobject=self.sobject, - fields=self.fields, - user_filter=self.selection_filter or None, - limit=limit_clause, - offset=None, - ) + ( + select_query, + query_fields, + ) = self.select_operation_executor.select_generate_query( + sobject=self.sobject, + fields=self.fields, + user_filter=self.selection_filter or None, + limit=limit_clause, + offset=None, ) # Execute the query and gather the records query_records = self._execute_soql_query(select_query, query_fields) # Post-process the query results for this batch - selected_records, insert_records, error_message = ( - self.select_operation_executor.select_post_process( - load_records=records, - query_records=query_records, - fields=self.fields, - num_records=total_num_records, - sobject=self.sobject, - weights=self.weights, - threshold=self.threshold, - ) + ( + selected_records, + insert_records, + error_message, + ) = self.select_operation_executor.select_post_process( + load_records=records, + query_records=query_records, + fields=self.fields, + num_records=total_num_records, + sobject=self.sobject, + weights=self.weights, + threshold=self.threshold, ) # Log the number of selected and prepared for insertion records From 6889461e236602f1d4ad72d61f40b7e2b81ffd50 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 19 Nov 2024 20:29:35 +0530 Subject: [PATCH 46/65] Update document with new enhancements --- docs/data.md | 87 ++++++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 71 insertions(+), 16 deletions(-) diff --git a/docs/data.md b/docs/data.md index c81ba44c90..a3e96275c1 100644 --- a/docs/data.md +++ b/docs/data.md @@ -252,40 +252,95 @@ versa. 
### Selects -The "select" functionality enhances the mapping process by enabling direct record selection from the target Salesforce org for lookups. This is achieved by specifying the `select` action in the mapping file, particularly useful when dealing with objects dependent on non-insertable Salesforce objects. +The `select` functionality is designed to streamline the mapping process by enabling the selection of specific records directly from Salesforce for lookups. This feature is particularly useful when dealing with non-insertable Salesforce objects and ensures that pre-existing records are used rather than inserting new ones. The selection process is highly customizable with various strategies, filters, and additional capabilities that provide flexibility and precision in data mapping. ```yaml -Select Accounts: +Account: sf_object: Account - action: select - selection_strategy: standard - selection_filter: WHERE Name IN ('Bluth Company', 'Camacho PLC') fields: - Name - - AccountNumber -Insert Contacts: + - Description + +Contact: sf_object: Contact - action: insert fields: - LastName + - Email lookups: AccountId: table: Account + +Lead: + sf_object: Lead + fields: + - LastName + - Company + +Event: + sf_object: Event + action: select + select_options: + strategy: similarity + filter: WHERE Subject IN ('Sample Event') + priority_fields: + - Subject + - WhoId + threshold: 0.3 + fields: + - Subject + - DurationInMinutes + - ActivityDateTime + lookups: + WhoId: + table: + - Contact + - Lead + WhatId: + table: Account ``` -The `Select Accounts` section in this YAML demonstrates how to fetch specific records from your Salesforce org. These selected Account records will then be referenced by the subsequent `Insert Contacts` section via lookups, ensuring that new Contacts are linked to the pre-existing Accounts chosen in the `select` step rather than relying on any newly inserted Account records. +--- + +#### Selection Strategies + +- **`standard` Strategy:** + The `standard` selection strategy retrieves records from Salesforce in the same order as they appear, applying any specified filters and sorting criteria. This method ensures that records are selected without any prioritization based on similarity or randomness, offering a straightforward way to pull the desired data. + +- **`similarity` Strategy:** + The `similarity` strategy is used when you need to find records in Salesforce that closely resemble the records defined in your SQL file. This strategy performs a similarity match between the records in the SQL file and those in Salesforce. In addition to comparing the fields of the record itself, this strategy includes the fields of parent records (up to one level) for a more granular and accurate match. + +- **`random` Strategy:** + The `random` selection strategy randomly assigns records picked from the target org. This method is useful when the selection order does not matter, and you simply need to fetch records in a randomized manner. + +--- + +#### Selection Filters + +The selection `filter` provides a flexible way to refine the records selected by using any functionality supported by SOQL. This includes filtering, sorting, and limiting records based on specific conditions, such as using the `WHERE` clause to filter records by field values, the `ORDER BY` clause to sort records in ascending or descending order, and the `LIMIT` clause to restrict the number of records returned. 
Essentially, any feature available in SOQL for record selection is supported here, allowing you to tailor the selection process to your precise needs and ensuring only the relevant records are included in the mapping process. + +--- + +#### Priority Fields + +The `priority_fields` feature enables you to specify a subset of fields in your mapping step that will have more weight during the similarity matching process. When similarity matching is performed, these priority fields will be given greater importance compared to other fields, allowing for a more refined match. + +This feature is particularly useful when certain fields are more critical in defining the identity or relevance of a record, ensuring that these fields have a stronger influence in the selection process. + +--- + +#### Select + Insert -#### Selection Strategy +This feature allows you to either select or insert records based on a similarity threshold. When using the `select` action with the `similarity` strategy, you can specify a `threshold` value between `0` and `1`, where `0` represents a perfect match and `1` signifies no similarity. -The `selection_strategy` dictates how these records are chosen: +- **Select Records:** + If a record from your SQL file has a similarity score below the threshold, it will be selected from the target org. -- `standard`: This strategy fetches records from the org in the same order as they appear, respecting any filtering applied via `selection_filter`. -- `similarity`: This strategy is employed when you want to find records in the org that closely resemble those defined in your SQL file. -- `random`: As the name suggests, this strategy randomly selects records from the org. +- **Insert Records:** + If the similarity score exceeds the threshold, the record will be inserted into the target org instead of being selected. -#### Selection Filter +This feature is particularly useful during version upgrades, where records that closely match can be selected, while those that do not match sufficiently can be inserted into the target org. -The `selection_filter` acts as a versatile SOQL clause, providing fine-grained control over record selection. It allows filtering with `WHERE`, sorting with `ORDER BY`, limiting with `LIMIT`, and potentially utilizing other SOQL capabilities, ensuring you select the precise records needed for your chosen `selection_strategy`. +--- ### Database Mapping From d2dbddd3042b0742bc7c393e2c8ee87cf0d888ba Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 19 Nov 2024 20:31:12 +0530 Subject: [PATCH 47/65] Add divider in doc --- docs/data.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/data.md b/docs/data.md index a3e96275c1..c449783af7 100644 --- a/docs/data.md +++ b/docs/data.md @@ -250,6 +250,8 @@ Insert Accounts: Whenever `update_key` is supplied, the action must be `upsert` and vice versa. +--- + ### Selects The `select` functionality is designed to streamline the mapping process by enabling the selection of specific records directly from Salesforce for lookups. This feature is particularly useful when dealing with non-insertable Salesforce objects and ensures that pre-existing records are used rather than inserting new ones. The selection process is highly customizable with various strategies, filters, and additional capabilities that provide flexibility and precision in data mapping. 
From 8a984fd14cae8f5cb836b9182a9f813e6ae5dce2 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Fri, 22 Nov 2024 17:17:22 +0530 Subject: [PATCH 48/65] Update documentation --- docs/data.md | 39 +++++++++++++++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/docs/data.md b/docs/data.md index c449783af7..9badb404e8 100644 --- a/docs/data.md +++ b/docs/data.md @@ -283,7 +283,7 @@ Event: action: select select_options: strategy: similarity - filter: WHERE Subject IN ('Sample Event') + filter: WHERE Subject LIKE 'Meeting%' priority_fields: - Subject - WhoId @@ -305,11 +305,13 @@ Event: #### Selection Strategies +The `strategy` parameter determines how records are selected from the target org. It is **optional**; if no strategy is specified, the `standard` strategy will be applied by default. + - **`standard` Strategy:** - The `standard` selection strategy retrieves records from Salesforce in the same order as they appear, applying any specified filters and sorting criteria. This method ensures that records are selected without any prioritization based on similarity or randomness, offering a straightforward way to pull the desired data. + The `standard` selection strategy retrieves records from target org in the same order as they appear, applying any specified filters and sorting criteria. This method ensures that records are selected without any prioritization based on similarity or randomness, offering a straightforward way to pull the desired data. - **`similarity` Strategy:** - The `similarity` strategy is used when you need to find records in Salesforce that closely resemble the records defined in your SQL file. This strategy performs a similarity match between the records in the SQL file and those in Salesforce. In addition to comparing the fields of the record itself, this strategy includes the fields of parent records (up to one level) for a more granular and accurate match. + The `similarity` strategy is used when you need to find records in the target org that closely resemble the records defined in your SQL file. This strategy performs a similarity match between the records in the SQL file and those in the target org. In addition to comparing the fields of the record itself, this strategy includes the fields of parent records (up to one level) for a more granular and accurate match. - **`random` Strategy:** The `random` selection strategy randomly assigns records picked from the target org. This method is useful when the selection order does not matter, and you simply need to fetch records in a randomized manner. @@ -320,17 +322,21 @@ Event: The selection `filter` provides a flexible way to refine the records selected by using any functionality supported by SOQL. This includes filtering, sorting, and limiting records based on specific conditions, such as using the `WHERE` clause to filter records by field values, the `ORDER BY` clause to sort records in ascending or descending order, and the `LIMIT` clause to restrict the number of records returned. Essentially, any feature available in SOQL for record selection is supported here, allowing you to tailor the selection process to your precise needs and ensuring only the relevant records are included in the mapping process. +This parameter is **optional**; and if not specified, no filter will apply. + --- #### Priority Fields The `priority_fields` feature enables you to specify a subset of fields in your mapping step that will have more weight during the similarity matching process. 
When similarity matching is performed, these priority fields will be given greater importance compared to other fields, allowing for a more refined match. +This parameter is **optional**; and if not specified, all fields will be considered with same priority. + This feature is particularly useful when certain fields are more critical in defining the identity or relevance of a record, ensuring that these fields have a stronger influence in the selection process. --- -#### Select + Insert +#### Threshold This feature allows you to either select or insert records based on a similarity threshold. When using the `select` action with the `similarity` strategy, you can specify a `threshold` value between `0` and `1`, where `0` represents a perfect match and `1` signifies no similarity. @@ -340,10 +346,35 @@ This feature allows you to either select or insert records based on a similarity - **Insert Records:** If the similarity score exceeds the threshold, the record will be inserted into the target org instead of being selected. +This parameter is **optional**; if not specified, no threshold will be applied and all records will default to be selected. + This feature is particularly useful during version upgrades, where records that closely match can be selected, while those that do not match sufficiently can be inserted into the target org. --- +#### Example + +To demonstrate the `select` functionality, consider the example of the `Event` entity, which utilizes the `similarity` strategy, a filter condition, and other advanced options to select matching records effectively as given in the yaml above. + +1. **Basic Object Configuration**: + + - The `Account`, `Contact`, and `Lead` objects are configured for straightforward field mapping. + - A `lookup` is defined on the `Contact` object to map `AccountId` to the `Account` table. + +2. **Advanced `Event` Object Mapping**: + - **Action**: The `Event` object uses the `select` action, meaning records are selected rather than inserted. + - **Strategy**: The `similarity` strategy matches `Event` records in target org that are similar to those defined in the SQL file. + - **Filter**: Only `Event` records with a `Subject` field starting with "Meeting" are considered. + - **Priority Fields**: The `Subject` and `WhoId` fields are given more weight during similarity matching. + - **Threshold**: A similarity score of 0.3 is used to determine whether records are selected or inserted. + - **Lookups**: + - The `WhoId` field looks up records from either the `Contact` or `Lead` objects. + - The `WhatId` field looks up records from the `Account` object. + +This example highlights how the `select` functionality can be applied in real-world scenarios, such as selecting `Event` records that meet specific criteria while considering similarity, filters, and priority fields. 
+ +--- + ### Database Mapping CumulusCI's definition format includes considerable flexibility for use From e53f191ff3f281374e42df5e0cc76d2c7046a113 Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Wed, 27 Nov 2024 00:01:43 +0530 Subject: [PATCH 49/65] Add integration tests for all selection strategies --- ...estSelect.test_select_random_strategy.yaml | 147 +++++ ...test_select_similarity_annoy_strategy.yaml | 123 ++++ ...similarity_select_and_insert_strategy.yaml | 313 ++++++++++ ...arity_select_and_insert_strategy_bulk.yaml | 550 ++++++++++++++++++ ...elect.test_select_similarity_strategy.yaml | 175 ++++++ ...tSelect.test_select_standard_strategy.yaml | 147 +++++ cumulusci/tasks/bulkdata/tests/test_select.py | 179 ++++++ datasets/select/random_mapping.yml | 22 + datasets/select/random_sample.sql | 49 ++ datasets/select/similarity_annoy_sample.sql | 161 +++++ datasets/select/similarity_mapping.yml | 22 + datasets/select/similarity_sample.sql | 49 ++ .../similarity_select_insert_mapping.yml | 47 ++ .../similarity_select_insert_mapping_bulk.yml | 47 ++ .../similarity_select_insert_sample.sql | 62 ++ datasets/select/standard_mapping.yml | 22 + 16 files changed, 2115 insertions(+) create mode 100644 cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml create mode 100644 cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_annoy_strategy.yaml create mode 100644 cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy.yaml create mode 100644 cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy_bulk.yaml create mode 100644 cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml create mode 100644 cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml create mode 100644 cumulusci/tasks/bulkdata/tests/test_select.py create mode 100644 datasets/select/random_mapping.yml create mode 100644 datasets/select/random_sample.sql create mode 100644 datasets/select/similarity_annoy_sample.sql create mode 100644 datasets/select/similarity_mapping.yml create mode 100644 datasets/select/similarity_sample.sql create mode 100644 datasets/select/similarity_select_insert_mapping.yml create mode 100644 datasets/select/similarity_select_insert_mapping_bulk.yml create mode 100644 datasets/select/similarity_select_insert_sample.sql create mode 100644 datasets/select/standard_mapping.yml diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml new file mode 100644 index 0000000000..508be49cb4 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml @@ -0,0 +1,147 @@ +version: 1 +interactions: + - &id001 + include_file: GET_sobjects_Global_describe.yaml + - &id002 + include_file: GET_sobjects_Account_describe.yaml + - *id001 + - *id002 + - *id002 + + - &id003 + include_file: GET_sobjects_Contact_describe.yaml + - *id001 + - *id003 + - *id003 + - &id007 + include_file: GET_sobjects_Opportunity_describe.yaml + - *id002 + - &id008 + include_file: GET_sobjects_Lead_describe.yaml # Added interaction for Lead + - *id001 + - &id009 + include_file: GET_sobjects_Event_describe.yaml # Added interaction for Event + - *id001 + - *id008 + - *id001 + - *id009 + - *id001 + + - request: + method: GET + uri: 
https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Account + body: null + headers: &id004 + Request-Headers: + - Elided + response: + status: + code: 200 + message: OK + headers: &id006 + Content-Type: + - application/json;charset=UTF-8 + Others: Elided + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 3,\n \"name\" : \"Account\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 10,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMEQA3\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMFQA3\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMGQA3\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMHQA3\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzyQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzyQAF\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzzQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzzQAF\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V00QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V00QAF\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V01QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V01QAF\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr 
perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V02QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V02QAF\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n } ]\n}" + + + + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact of Tom Cruise", + "AccountId": "0019H00000H1RMDQA3", "attributes": {"type": "Contact"}}, {"LastName": + "Contact of Bob the Builder", "AccountId": "0019H00000H1RMDQA3", "attributes": + {"type": "Contact"}}, {"LastName": "Contact of SRK", "AccountId": "0019H00000H1RMDQA3", + "attributes": {"type": "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BbbFBQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFCQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFDQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements'%20LIMIT%205 + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 5,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n } ]\n}" \ No newline at end of file diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_annoy_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_annoy_strategy.yaml new file mode 100644 index 0000000000..d4f51b3308 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_annoy_strategy.yaml @@ -0,0 +1,123 @@ +version: 1 +interactions: + - &id001 + include_file: GET_sobjects_Global_describe.yaml + - &id002 + include_file: GET_sobjects_Account_describe.yaml + - *id001 + - *id002 + - *id002 + + - &id003 + include_file: GET_sobjects_Contact_describe.yaml + - *id001 + - *id003 + - *id003 + - &id007 + include_file: GET_sobjects_Opportunity_describe.yaml + - *id002 + - &id008 + include_file: GET_sobjects_Lead_describe.yaml # Added interaction for Lead + - *id001 + - &id009 + include_file: GET_sobjects_Event_describe.yaml # Added interaction for Event + - *id001 + - *id008 + - *id001 + - *id009 + - *id001 + + - 
request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Account + body: null + headers: &id004 + Request-Headers: + - Elided + response: + status: + code: 200 + message: OK + headers: &id006 + Content-Type: + - application/json;charset=UTF-8 + Others: Elided + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 3,\n \"name\" : \"Account\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 10,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMEQA3\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMFQA3\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMGQA3\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMHQA3\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzyQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzyQAF\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzzQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzzQAF\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V00QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V00QAF\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V01QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V01QAF\",\n \"Name\" : \"Aamir 
Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V02QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V02QAF\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n } ]\n}" + + + + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact of Tom Cruise", + "AccountId": "0019H00000H1RMDQA3", "attributes": {"type": "Contact"}}, {"LastName": + "Contact of Bob the Builder", "AccountId": "0019H00000H1RMEQA3", "attributes": + {"type": "Contact"}}, {"LastName": "Contact of SRK", "AccountId": "0019H00000H1RMFQA3", + "attributes": {"type": "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BbbFBQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFCQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFDQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" \ No newline at end of file diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy.yaml new file mode 100644 index 0000000000..4bebf958e1 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy.yaml @@ -0,0 +1,313 @@ +version: 1 +interactions: + - &id001 + include_file: GET_sobjects_Global_describe.yaml + - &id002 + include_file: GET_sobjects_Account_describe.yaml + - *id001 + - *id002 + - *id002 + + - &id003 + include_file: GET_sobjects_Contact_describe.yaml + - *id001 + - *id003 + - *id003 + - &id007 + include_file: GET_sobjects_Opportunity_describe.yaml + - *id002 + - &id008 + include_file: GET_sobjects_Lead_describe.yaml # Added interaction for Lead + - *id008 + - *id001 + - &id009 + include_file: GET_sobjects_Event_describe.yaml # Added interaction for Event + - *id001 + - *id008 + - *id001 + - *id009 + - *id009 + - *id009 + - *id009 + - *id001 + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Account + body: null + headers: &id004 + Request-Headers: + - Elided + response: + status: + code: 200 + message: OK + headers: &id006 + Content-Type: + - application/json;charset=UTF-8 + Others: Elided + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 3,\n \"name\" : \"Account\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 10,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : 
\"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMEQA3\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMFQA3\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMGQA3\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMHQA3\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzyQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzyQAF\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzzQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzzQAF\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V00QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V00QAF\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V01QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V01QAF\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V02QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V02QAF\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n } ]\n}" + + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact of Tom Cruise", + "AccountId": "0019H00000H1RMDQA3", "attributes": {"type": "Contact"}}, {"LastName": + "Contact of Bob the Builder", "AccountId": "0019H00000H1RMEQA3", "attributes": + {"type": "Contact"}}, {"LastName": "Contact of SRK", "AccountId": "0019H00000H1RMFQA3", + "attributes": {"type": "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BbbFBQAZ\",\n \"success\" : true,\n \"errors\" + 
: [ ]\n}, {\n \"id\" : \"0039H00000BbbFCQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFDQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"Name": "Tom Cruise", "Description": + "Some Description", "Phone": "123456", "AccountNumber": "123", "attributes": + {"type": "Account"}}, {"Name": "Bob The Builder", "Description": "Some Description", + "Phone": "123456", "AccountNumber": "123", "attributes": {"type": "Account"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0019H00000H28uAQAR\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0019H00000H28uBQAR\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: PATCH + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"Id": "0019H00000H28uBQAR", "ParentId": + "0019H00000H28uAQAR", "attributes": {"type": "Account"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0019H00000H28uBQAR\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact", "AccountId": "0019H00000H28uAQAR", + "attributes": {"type": "Contact"}}, {"LastName": "Contact", "AccountId": "0019H00000H28uBQAR", + "attributes": {"type": "Contact"}}, {"LastName": "Contact", "attributes": {"type": + "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BcB5lQAF\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BcB5mQAF\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BcB5nQAF\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "First Lead", "Company": + "Salesforce", "attributes": {"type": "Lead"}}, {"LastName": "Second Lead", "Company": + "Salesforce", "attributes": {"type": "Lead"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"00Q9H00000C6pycUAB\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"00Q9H00000C6pydUAB\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"Subject": "third record!!!!!!!!", "DurationInMinutes": + "31", "ActivityDateTime": "2024-11-07T07:00:00.000+0000", "WhoId": "0039H00000BcB5mQAF", + "WhatId": "0019H00000H28uAQAR", "attributes": {"type": "Event"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"00U9H000000S01tUAC\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Event + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 
0,\n \"name\" : \"Event\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20TYPEOF%20Who%20WHEN%20Contact%20THEN%20LastName,%20Email%20WHEN%20Lead%20THEN%20LastName,%20Company%20ELSE%20Id%20END,%20TYPEOF%20What%20WHEN%20Account%20THEN%20Name,%20Description,%20Phone,%20AccountNumber%20ELSE%20Id%20END,%20Subject,%20DurationInMinutes,%20ActivityDateTime%20FROM%20Event + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 3,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvRUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvRUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vtQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n + \ \"What\" : null,\n \"Subject\" : \"Test Event 2\",\n \"DurationInMinutes\" + : 60,\n \"ActivityDateTime\" : \"2024-11-07T07:00:00.000+0000\"\n }, {\n + \ \"attributes\" : {\n \"type\" : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvSUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvSUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vtQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n + \ \"What\" : {\n \"attributes\" : {\n \"type\" : \"Account\",\n + \ \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H25ppQAB\"\n + \ },\n \"Name\" : \"Tom Cruise\",\n \"Description\" : \"Some + Description\",\n \"Phone\" : \"123456\",\n \"AccountNumber\" : \"123\"\n + \ },\n \"Subject\" : \"Test Event 1\",\n \"DurationInMinutes\" : 60,\n + \ \"ActivityDateTime\" : \"2024-11-07T07:00:00.000+0000\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvTUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvTUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vuQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n + \ \"What\" : {\n \"attributes\" : {\n \"type\" : \"Account\",\n + \ \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H25ppQAB\"\n + \ },\n \"Name\" : \"Tom Cruise\",\n \"Description\" : \"Some + Description\",\n \"Phone\" : \"123456\",\n \"AccountNumber\" : \"123\"\n + \ },\n \"Subject\" : \"Test Event 3\",\n \"DurationInMinutes\" : 60,\n + \ \"ActivityDateTime\" : \"3156-11-12T13:00:00.000+0000\"\n } ]\n}" + + - request: + body: insertEventParallelCSV + headers: + Content-Type: + - application/xml; charset=UTF-8 + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job + response: + body: + string: "\n + 7509H0000016HP0QAM\n insert\n Event\n + 0059H0000059qXZQAY\n 2024-11-21T09:28:59.000Z\n + 2024-11-21T09:28:59.000Z\n Open\n + Parallel\n CSV\n + 0\n 0\n + 0\n 0\n + 0\n 0\n + 0\n 62.0\n 0\n + 0\n 0\n + 0\n" + headers: *id006 + status: + code: 201 + message: Created + + - request: + body: queryEventParallelJSON + headers: + Content-Type: + - application/xml; charset=UTF-8 + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job + response: + body: + string: "\n + 7509H0000016HQbQAM\n query\n Event\n + 0059H0000059qXZQAY\n 2024-11-21T09:28:48.000Z\n + 2024-11-21T09:28:48.000Z\n Open\n + 
Parallel\n JSON\n + 0\n 0\n + 0\n 0\n + 0\n 0\n + 0\n 62.0\n 0\n + 0\n 0\n + 0\n" + headers: *id006 + status: + code: 201 + message: Created \ No newline at end of file diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy_bulk.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy_bulk.yaml new file mode 100644 index 0000000000..92ff0a2061 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_select_and_insert_strategy_bulk.yaml @@ -0,0 +1,550 @@ +version: 1 +interactions: + - &id001 + include_file: GET_sobjects_Global_describe.yaml + - &id002 + include_file: GET_sobjects_Account_describe.yaml + - *id001 + - *id002 + - *id002 + + - &id003 + include_file: GET_sobjects_Contact_describe.yaml + - *id001 + - *id003 + - *id003 + - &id007 + include_file: GET_sobjects_Opportunity_describe.yaml + - *id002 + - &id008 + include_file: GET_sobjects_Lead_describe.yaml # Added interaction for Lead + - *id008 + - *id001 + - &id009 + include_file: GET_sobjects_Event_describe.yaml # Added interaction for Event + - *id001 + - *id008 + - *id001 + - *id009 + - *id009 + - *id009 + - *id009 + - *id001 + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Account + body: null + headers: &id004 + Request-Headers: + - Elided + response: + status: + code: 200 + message: OK + headers: &id006 + Content-Type: + - application/json;charset=UTF-8 + Others: Elided + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 3,\n \"name\" : \"Account\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 10,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMEQA3\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMFQA3\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMGQA3\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMHQA3\",\n \"Name\" : \"Salman Khan\",\n + 
\ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzyQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzyQAF\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzzQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzzQAF\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V00QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V00QAF\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V01QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V01QAF\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V02QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V02QAF\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n } ]\n}" + + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact of Tom Cruise", + "AccountId": "0019H00000H1RMDQA3", "attributes": {"type": "Contact"}}, {"LastName": + "Contact of Bob the Builder", "AccountId": "0019H00000H1RMEQA3", "attributes": + {"type": "Contact"}}, {"LastName": "Contact of SRK", "AccountId": "0019H00000H1RMFQA3", + "attributes": {"type": "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BbbFBQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFCQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFDQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"Name": "Tom Cruise", "Description": + "Some Description", "Phone": "123456", "AccountNumber": "123", "attributes": + {"type": "Account"}}, {"Name": "Bob The Builder", "Description": "Some Description", + "Phone": "123456", "AccountNumber": "123", "attributes": {"type": "Account"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0019H00000H28uAQAR\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0019H00000H28uBQAR\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: PATCH + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"Id": "0019H00000H28uBQAR", "ParentId": + "0019H00000H28uAQAR", "attributes": {"type": "Account"}}]}' + headers: *id004 
+ response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0019H00000H28uBQAR\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact", "AccountId": "0019H00000H28uAQAR", + "attributes": {"type": "Contact"}}, {"LastName": "Contact", "AccountId": "0019H00000H28uBQAR", + "attributes": {"type": "Contact"}}, {"LastName": "Contact", "attributes": {"type": + "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BcB5lQAF\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BcB5mQAF\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BcB5nQAF\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "First Lead", "Company": + "Salesforce", "attributes": {"type": "Lead"}}, {"LastName": "Second Lead", "Company": + "Salesforce", "attributes": {"type": "Lead"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"00Q9H00000C6pycUAB\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"00Q9H00000C6pydUAB\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"Subject": "third record!!!!!!!!", "DurationInMinutes": + "31", "ActivityDateTime": "2024-11-07T07:00:00.000+0000", "WhoId": "0039H00000BcB5mQAF", + "WhatId": "0019H00000H28uAQAR", "attributes": {"type": "Event"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"00U9H000000S01tUAC\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Event + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 0,\n \"name\" : \"Event\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20TYPEOF%20Who%20WHEN%20Contact%20THEN%20LastName,%20Email%20WHEN%20Lead%20THEN%20LastName,%20Company%20ELSE%20Id%20END,%20TYPEOF%20What%20WHEN%20Account%20THEN%20Name,%20Description,%20Phone,%20AccountNumber%20ELSE%20Id%20END,%20Subject,%20DurationInMinutes,%20ActivityDateTime%20FROM%20Event + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 3,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvRUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvRUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vtQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n + \ \"What\" : null,\n \"Subject\" : \"Test Event 2\",\n \"DurationInMinutes\" + : 60,\n \"ActivityDateTime\" : \"2024-11-07T07:00:00.000+0000\"\n }, {\n + \ \"attributes\" : {\n 
\"type\" : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvSUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvSUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vtQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n + \ \"What\" : {\n \"attributes\" : {\n \"type\" : \"Account\",\n + \ \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H25ppQAB\"\n + \ },\n \"Name\" : \"Tom Cruise\",\n \"Description\" : \"Some + Description\",\n \"Phone\" : \"123456\",\n \"AccountNumber\" : \"123\"\n + \ },\n \"Subject\" : \"Test Event 1\",\n \"DurationInMinutes\" : 60,\n + \ \"ActivityDateTime\" : \"2024-11-07T07:00:00.000+0000\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvTUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvTUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vuQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n + \ \"What\" : {\n \"attributes\" : {\n \"type\" : \"Account\",\n + \ \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H25ppQAB\"\n + \ },\n \"Name\" : \"Tom Cruise\",\n \"Description\" : \"Some + Description\",\n \"Phone\" : \"123456\",\n \"AccountNumber\" : \"123\"\n + \ },\n \"Subject\" : \"Test Event 3\",\n \"DurationInMinutes\" : 60,\n + \ \"ActivityDateTime\" : \"3156-11-12T13:00:00.000+0000\"\n } ]\n}" + + - request: + body: insertEventParallelCSV + headers: + Content-Type: + - application/xml; charset=UTF-8 + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job + response: + body: + string: "\n + 7509H0000016HP0QAM\n insert\n Event\n + 0059H0000059qXZQAY\n 2024-11-21T09:28:59.000Z\n + 2024-11-21T09:28:59.000Z\n Open\n + Parallel\n CSV\n + 0\n 0\n + 0\n 0\n + 0\n 0\n + 0\n 62.0\n 0\n + 0\n 0\n + 0\n" + headers: *id006 + status: + code: 201 + message: Created + + - request: + body: queryEventParallelJSON + headers: + Content-Type: + - application/xml; charset=UTF-8 + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job + response: + body: + string: "\n + 7509H0000016HQbQAM\n query\n Event\n + 0059H0000059qXZQAY\n 2024-11-21T09:28:48.000Z\n + 2024-11-21T09:28:48.000Z\n Open\n + Parallel\n JSON\n + 0\n 0\n + 0\n 0\n + 0\n 0\n + 0\n 62.0\n 0\n + 0\n 0\n + 0\n" + headers: *id006 + status: + code: 201 + message: Created + + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Event + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 0,\n \"name\" : \"Event\"\n + \ } ]\n}" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job + body: queryEventParallelJSON + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "\n + 7509H0000016GUXQA2\n query\n Event\n + 0059H0000059qXZQAY\n 2024-11-21T08:31:31.000Z\n + 2024-11-21T08:31:31.000Z\n Open\n + Parallel\n JSON\n + 0\n 0\n + 0\n 0\n + 0\n 0\n + 0\n 62.0\n 0\n + 0\n 0\n + 0\n" + + - request: + body: SELECT Id, TYPEOF Who WHEN Contact THEN LastName, Email WHEN Lead THEN LastName, + Company ELSE Id END, TYPEOF What WHEN Account THEN Name, Description, Phone, + AccountNumber ELSE Id END, Subject, DurationInMinutes, ActivityDateTime FROM + 
Event + headers: *id004 + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job/7509H0000016GUXQA2/batch + + response: + status: + code: 201 + message: Created + headers: *id006 + body: + string: '{"apexProcessingTime":0,"apiActiveProcessingTime":0,"createdDate":"2024-11-21T08:31:31.000+0000","id":"7519H000001XiCzQAK","jobId":"7509H0000016GUXQA2","numberRecordsFailed":0,"numberRecordsProcessed":0,"state":"Queued","stateMessage":null,"systemModstamp":"2024-11-21T08:31:31.000+0000","totalProcessingTime":0}' + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/async/62.0/job/7509H0000016GUXQA2/batch/7519H000001XiCzQAK + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: + Content-Type: + - application/json + body: + string: '{"apexProcessingTime":0,"apiActiveProcessingTime":0,"createdDate":"2024-11-21T08:31:31.000+0000","id":"7519H000001XiCzQAK","jobId":"7509H0000016GUXQA2","numberRecordsFailed":0,"numberRecordsProcessed":0,"state":"Completed","stateMessage":null,"systemModstamp":"2024-11-21T08:31:32.000+0000","totalProcessingTime":0}' + + - request: + method: GET + uri: https://orgname.my.salesforce.com//services/async/62.0/job/7509H0000016GUXQA2/batch/7519H000001XiCzQAK/result + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: + Content-Type: + - application/json + body: + string: '["7529H000000xO6L"]' + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/async/62.0/job/7509H0000016GUXQA2/batch/7519H000001XiCzQAK + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: + Content-Type: + - application/json + body: + string: '{"apexProcessingTime":0,"apiActiveProcessingTime":0,"createdDate":"2024-11-21T08:31:31.000+0000","id":"7519H000001XiCzQAK","jobId":"7509H0000016GUXQA2","numberRecordsFailed":0,"numberRecordsProcessed":0,"state":"Completed","stateMessage":null,"systemModstamp":"2024-11-21T08:31:32.000+0000","totalProcessingTime":0}' + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/async/62.0/job/7509H0000016GUXQA2/batch/7519H000001XiCzQAK/result/7529H000000xO6L?format=json + 
body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: + Content-Type: + - application/json + body: + string: "[ {\n \"attributes\" : {\n \"type\" : \"Event\",\n \"url\" : + \"/services/data/v62.0/sobjects/Event/00U9H000000RzvRUAS\"\n },\n \"Id\" + : \"00U9H000000RzvRUAS\",\n \"Who\" : {\n \"attributes\" : {\n \"type\" + : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vtQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n \"What\" + : null,\n \"Subject\" : \"Test Event 2\",\n \"DurationInMinutes\" : 60,\n + \ \"ActivityDateTime\" : 1730962800000\n}, {\n \"attributes\" : {\n \"type\" + : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvSUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvSUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vtQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n \"What\" + : {\n \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" + : \"/services/data/v62.0/sobjects/Account/0019H00000H25ppQAB\"\n },\n \"Name\" + : \"Tom Cruise\",\n \"Description\" : \"Some Description\",\n \"Phone\" + : \"123456\",\n \"AccountNumber\" : \"123\"\n },\n \"Subject\" : \"Test + Event 1\",\n \"DurationInMinutes\" : 60,\n \"ActivityDateTime\" : 1730962800000\n}, + {\n \"attributes\" : {\n \"type\" : \"Event\",\n \"url\" : \"/services/data/v62.0/sobjects/Event/00U9H000000RzvTUAS\"\n + \ },\n \"Id\" : \"00U9H000000RzvTUAS\",\n \"Who\" : {\n \"attributes\" + : {\n \"type\" : \"Contact\",\n \"url\" : \"/services/data/v62.0/sobjects/Contact/0039H00000Bc8vuQAB\"\n + \ },\n \"LastName\" : \"Contact\",\n \"Email\" : null\n },\n \"What\" + : {\n \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" + : \"/services/data/v62.0/sobjects/Account/0019H00000H25ppQAB\"\n },\n \"Name\" + : \"Tom Cruise\",\n \"Description\" : \"Some Description\",\n \"Phone\" + : \"123456\",\n \"AccountNumber\" : \"123\"\n },\n \"Subject\" : \"Test + Event 3\",\n \"DurationInMinutes\" : 60,\n \"ActivityDateTime\" : 37453842000000\n} + ]" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job + body: queryEventParallelJSON + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "<?xml version=\"1.0\" encoding=\"UTF-8\"?><jobInfo\n xmlns=\"http://www.force.com/2009/06/asyncapi/dataload\">\n + <id>7509H0000016GUXQA2</id>\n <operation>query</operation>\n <object>Event</object>\n + <createdById>0059H0000059qXZQAY</createdById>\n <createdDate>2024-11-21T08:31:31.000Z</createdDate>\n + <systemModstamp>2024-11-21T08:31:31.000Z</systemModstamp>\n <state>Open</state>\n + <concurrencyMode>Parallel</concurrencyMode>\n <contentType>JSON</contentType>\n + <numberBatchesQueued>0</numberBatchesQueued>\n <numberBatchesInProgress>0</numberBatchesInProgress>\n + <numberBatchesCompleted>0</numberBatchesCompleted>\n <numberBatchesFailed>0</numberBatchesFailed>\n + <numberBatchesTotal>0</numberBatchesTotal>\n <numberRecordsProcessed>0</numberRecordsProcessed>\n + <numberRetries>0</numberRetries>\n <apiVersion>62.0</apiVersion>\n <numberRecordsFailed>0</numberRecordsFailed>\n + <totalProcessingTime>0</totalProcessingTime>\n <apiActiveProcessingTime>0</apiActiveProcessingTime>\n + <apexProcessingTime>0</apexProcessingTime>\n</jobInfo>" + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/async/62.0/job + body: insertEventParallelCSV + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "<?xml version=\"1.0\" encoding=\"UTF-8\"?><jobInfo\n xmlns=\"http://www.force.com/2009/06/asyncapi/dataload\">\n + <id>7509H0000016HP0QAM</id>\n <operation>insert</operation>\n <object>Event</object>\n + <createdById>0059H0000059qXZQAY</createdById>\n <createdDate>2024-11-21T09:28:59.000Z</createdDate>\n + <systemModstamp>2024-11-21T09:28:59.000Z</systemModstamp>\n <state>Open</state>\n + <concurrencyMode>Parallel</concurrencyMode>\n <contentType>CSV</contentType>\n + <numberBatchesQueued>0</numberBatchesQueued>\n <numberBatchesInProgress>0</numberBatchesInProgress>\n + <numberBatchesCompleted>0</numberBatchesCompleted>\n <numberBatchesFailed>0</numberBatchesFailed>\n + <numberBatchesTotal>0</numberBatchesTotal>\n <numberRecordsProcessed>0</numberRecordsProcessed>\n + <numberRetries>0</numberRetries>\n <apiVersion>62.0</apiVersion>\n <numberRecordsFailed>0</numberRecordsFailed>\n + <totalProcessingTime>0</totalProcessingTime>\n <apiActiveProcessingTime>0</apiActiveProcessingTime>\n + <apexProcessingTime>0</apexProcessingTime>\n</jobInfo>" + \ No newline at end of file diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml new file mode 100644 index 0000000000..31897e7650 --- /dev/null +++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml @@ -0,0 +1,175 @@ +version: 1 +interactions: + - &id001 + include_file: GET_sobjects_Global_describe.yaml + - &id002 + include_file: 
GET_sobjects_Account_describe.yaml + - *id001 + - *id002 + - *id002 + + - &id003 + include_file: GET_sobjects_Contact_describe.yaml + - *id001 + - *id003 + - *id003 + - &id007 + include_file: GET_sobjects_Opportunity_describe.yaml + - *id002 + - &id008 + include_file: GET_sobjects_Lead_describe.yaml # Added interaction for Lead + - *id001 + - &id009 + include_file: GET_sobjects_Event_describe.yaml # Added interaction for Event + - *id001 + - *id008 + - *id001 + - *id009 + - *id001 + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Account + body: null + headers: &id004 + Request-Headers: + - Elided + response: + status: + code: 200 + message: OK + headers: &id006 + Content-Type: + - application/json;charset=UTF-8 + Others: Elided + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 3,\n \"name\" : \"Account\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 10,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMEQA3\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMFQA3\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMGQA3\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMHQA3\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzyQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzyQAF\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzzQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzzQAF\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n 
\"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V00QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V00QAF\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V01QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V01QAF\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V02QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V02QAF\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n } ]\n}" + + + + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact of Tom Cruise", + "AccountId": "0019H00000H1RMDQA3", "attributes": {"type": "Contact"}}, {"LastName": + "Contact of Bob the Builder", "AccountId": "0019H00000H1RMEQA3", "attributes": + {"type": "Contact"}}, {"LastName": "Contact of SRK", "AccountId": "0019H00000H1RMFQA3", + "attributes": {"type": "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BbbFBQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFCQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFDQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + + # - request: + # method: POST + # uri: https://orgname.my.salesforce.com/services/data/vxx.0/composite/sobjects + # body: '{"allOrNone": false, "records": [{"Name": "Tom Cruise", "Description": "Some Description", "attributes": + # {"type": "Account"}}]}' + # headers: &id004 + # Request-Headers: + # - Elided + # response: + # status: &id005 + # code: 200 + # message: OK + # headers: &id006 + # Content-Type: + # - application/json;charset=UTF-8 + # Others: Elided + # body: + # string: + # "[ {\n \"id\" : \"0015500001QdZPKAA3\",\n \"success\" : true,\n \"\ + # errors\" : [ ]\n} ]" + + # - request: + # method: POST + # uri: https://orgname.my.salesforce.com/services/data/vxx.0/composite/sobjects + # body: '{"allOrNone": false, "records": [{"Name": "Sitwell-Bluth", "attributes": + # {"type": "Account"}}]}' + # headers: *id004 + # response: + # status: *id005 + # headers: *id006 + # body: + # string: + # "[ {\n \"id\" : \"0015500001QdZPKAA3\",\n \"success\" : true,\n \"\ + # errors\" : [ ]\n} ]" + # - request: + # method: POST + # uri: https://orgname.my.salesforce.com/services/data/vxx.0/composite/sobjects + # body: '{"allOrNone": false, "records": [{"Name": "Sitwell-Bluth", "attributes": + # {"type": "Account"}}]}' + # headers: *id004 + # response: + # status: *id005 + # headers: *id006 + # body: + # string: + # "[ {\n \"id\" : \"0015500001QdZPKAA3\",\n \"success\" : true,\n \"\ + # errors\" : [ ]\n} ]" + + + \ No newline at end of file diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml new file mode 100644 index 0000000000..508be49cb4 --- /dev/null +++ 
b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml @@ -0,0 +1,147 @@ +version: 1 +interactions: + - &id001 + include_file: GET_sobjects_Global_describe.yaml + - &id002 + include_file: GET_sobjects_Account_describe.yaml + - *id001 + - *id002 + - *id002 + + - &id003 + include_file: GET_sobjects_Contact_describe.yaml + - *id001 + - *id003 + - *id003 + - &id007 + include_file: GET_sobjects_Opportunity_describe.yaml + - *id002 + - &id008 + include_file: GET_sobjects_Lead_describe.yaml # Added interaction for Lead + - *id001 + - &id009 + include_file: GET_sobjects_Event_describe.yaml # Added interaction for Event + - *id001 + - *id008 + - *id001 + - *id009 + - *id001 + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/limits/recordCount?sObjects=Account + body: null + headers: &id004 + Request-Headers: + - Elided + response: + status: + code: 200 + message: OK + headers: &id006 + Content-Type: + - application/json;charset=UTF-8 + Others: Elided + body: + string: "{\n \"sObjects\" : [ {\n \"count\" : 3,\n \"name\" : \"Account\"\n + \ } ]\n}" + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 10,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMEQA3\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMFQA3\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMGQA3\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMHQA3\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1UzyQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzyQAF\",\n \"Name\" : \"Tom Cruise\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : 
\"/services/data/v62.0/sobjects/Account/0019H00000H1UzzQAF\"\n + \ },\n \"Id\" : \"0019H00000H1UzzQAF\",\n \"Name\" : \"Bob The Builder\",\n + \ \"Description\" : \"Some Description\",\n \"Phone\" : \"12345632\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V00QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V00QAF\",\n \"Name\" : \"Shah Rukh Khan\",\n + \ \"Description\" : \"Bollywood actor\",\n \"Phone\" : \"12345612\",\n + \ \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" : {\n \"type\" + : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V01QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V01QAF\",\n \"Name\" : \"Aamir Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n }, {\n \"attributes\" + : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1V02QAF\"\n + \ },\n \"Id\" : \"0019H00000H1V02QAF\",\n \"Name\" : \"Salman Khan\",\n + \ \"Description\" : \"Mr perfectionist, bollywood actor\",\n \"Phone\" + : \"12345623\",\n \"AccountNumber\" : \"123\"\n } ]\n}" + + + + + - request: + method: POST + uri: https://orgname.my.salesforce.com/services/data/v62.0/composite/sobjects + body: '{"allOrNone": false, "records": [{"LastName": "Contact of Tom Cruise", + "AccountId": "0019H00000H1RMDQA3", "attributes": {"type": "Contact"}}, {"LastName": + "Contact of Bob the Builder", "AccountId": "0019H00000H1RMDQA3", "attributes": + {"type": "Contact"}}, {"LastName": "Contact of SRK", "AccountId": "0019H00000H1RMDQA3", + "attributes": {"type": "Contact"}}]}' + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "[ {\n \"id\" : \"0039H00000BbbFBQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFCQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n}, {\n \"id\" : \"0039H00000BbbFDQAZ\",\n \"success\" : true,\n \"errors\" + : [ ]\n} ]" + + + - request: + method: GET + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements'%20LIMIT%205 + body: null + headers: *id004 + response: + status: + code: 200 + message: OK + headers: *id006 + body: + string: "{\n \"totalSize\" : 5,\n \"done\" : true,\n \"records\" : [ {\n + \ \"attributes\" : {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMDQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMEQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMFQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMGQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n }, {\n \"attributes\" : + {\n \"type\" : \"Account\",\n \"url\" : \"/services/data/v62.0/sobjects/Account/0019H00000H1RMHQA3\"\n + \ },\n \"Id\" : \"0019H00000H1RMDQA3\"\n } ]\n}" \ No newline at end of file diff --git a/cumulusci/tasks/bulkdata/tests/test_select.py b/cumulusci/tasks/bulkdata/tests/test_select.py new file mode 100644 index 0000000000..6512c829b0 --- /dev/null +++ 
b/cumulusci/tasks/bulkdata/tests/test_select.py @@ -0,0 +1,179 @@ +import re +from unittest import mock + +import pytest +import responses +import yaml + +from cumulusci.core.exceptions import BulkDataException +from cumulusci.tasks.bulkdata import LoadData +from cumulusci.tasks.bulkdata.step import DataApi, DataOperationStatus +from cumulusci.tests.util import CURRENT_SF_API_VERSION, mock_describe_calls + + +class TestSelect: + @pytest.mark.vcr() + def test_select_similarity_strategy( + self, create_task, cumulusci_test_repo_root, sf + ): + self._test_select_similarity_strategy( + "rest", create_task, cumulusci_test_repo_root, sf + ) + + @pytest.mark.vcr() + def test_select_similarity_select_and_insert_strategy( + self, create_task, cumulusci_test_repo_root, sf + ): + self._test_select_similarity_select_and_insert_strategy( + "rest", create_task, cumulusci_test_repo_root, sf + ) + + @pytest.mark.vcr(allow_playback_repeats=True) + def test_select_similarity_select_and_insert_strategy_bulk( + self, create_task, cumulusci_test_repo_root, sf + ): + self._test_select_similarity_select_and_insert_strategy_bulk( + "bulk", create_task, cumulusci_test_repo_root, sf + ) + + @pytest.mark.vcr() + def test_select_random_strategy(self, create_task, cumulusci_test_repo_root, sf): + self._test_select_random_strategy( + "rest", create_task, cumulusci_test_repo_root, sf + ) + + @pytest.mark.vcr() + def test_select_standard_strategy(self, create_task, cumulusci_test_repo_root, sf): + self._test_select_standard_strategy( + "rest", create_task, cumulusci_test_repo_root, sf + ) + + def _test_select_similarity_strategy( + self, api, create_task, cumulusci_test_repo_root, sf + ): + # seed sample data, using a mixture of inserts and + # upserts-into-empty (which should behave as inserts) + task = create_task( + LoadData, + { + "sql_path": cumulusci_test_repo_root + / "datasets/select/similarity_sample.sql", + "mapping": cumulusci_test_repo_root + / "datasets/select/similarity_mapping.yml", + "set_recently_viewed": False, + "enable_rollback": False, + }, + ) + + task() + + result = task.return_values + + assert ( + str(result) + == "{'step_results': {'Account': {'sobject': 'Account', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 5, 'total_row_errors': 0}, 'Contact': {'sobject': 'Contact', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 3, 'total_row_errors': 0}}}" + ) + + def _test_select_similarity_select_and_insert_strategy( + self, api, create_task, cumulusci_test_repo_root, sf + ): + # seed sample data, using a mixture of inserts and + # upserts-into-empty (which should behave as inserts) + task = create_task( + LoadData, + { + "sql_path": cumulusci_test_repo_root + / "datasets/select/similarity_select_insert_sample.sql", + "mapping": cumulusci_test_repo_root + / "datasets/select/similarity_select_insert_mapping.yml", + "set_recently_viewed": False, + "enable_rollback": False, + }, + ) + + task() + + result = task.return_values + + assert ( + str(result) + == "{'step_results': {'Account': {'sobject': 'Account', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 1, 'total_row_errors': 0}, 'Contact': {'sobject': 'Contact', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 3, 'total_row_errors': 0}, 'Lead': {'sobject': 'Lead', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 2, 'total_row_errors': 0}, 'Event': {'sobject': 'Event', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 
'records_processed': 3, 'total_row_errors': 0}}}" + ) + + def _test_select_similarity_select_and_insert_strategy_bulk( + self, api, create_task, cumulusci_test_repo_root, sf + ): + # seed sample data, using a mixture of inserts and + # upserts-into-empty (which should behave as inserts) + task = create_task( + LoadData, + { + "sql_path": cumulusci_test_repo_root + / "datasets/select/similarity_select_insert_sample.sql", + "mapping": cumulusci_test_repo_root + / "datasets/select/similarity_select_insert_mapping.yml", + "set_recently_viewed": False, + "enable_rollback": False, + }, + ) + + task() + + result = task.return_values + + assert ( + str(result) + == "{'step_results': {'Account': {'sobject': 'Account', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 1, 'total_row_errors': 0}, 'Contact': {'sobject': 'Contact', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 3, 'total_row_errors': 0}, 'Lead': {'sobject': 'Lead', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 2, 'total_row_errors': 0}, 'Event': {'sobject': 'Event', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 3, 'total_row_errors': 0}}}" + ) + + def _test_select_random_strategy( + self, api, create_task, cumulusci_test_repo_root, sf + ): + # seed sample data, using a mixture of inserts and + # upserts-into-empty (which should behave as inserts) + task = create_task( + LoadData, + { + "sql_path": cumulusci_test_repo_root + / "datasets/select/random_sample.sql", + "mapping": cumulusci_test_repo_root + / "datasets/select/random_mapping.yml", + "set_recently_viewed": False, + "enable_rollback": False, + }, + ) + + task() + + result = task.return_values + + assert ( + str(result) + == "{'step_results': {'Account': {'sobject': 'Account', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 5, 'total_row_errors': 0}, 'Contact': {'sobject': 'Contact', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 3, 'total_row_errors': 0}}}" + ) + + def _test_select_standard_strategy( + self, api, create_task, cumulusci_test_repo_root, sf + ): + # seed sample data, using a mixture of inserts and + # upserts-into-empty (which should behave as inserts) + task = create_task( + LoadData, + { + "sql_path": cumulusci_test_repo_root + / "datasets/select/random_sample.sql", + "mapping": cumulusci_test_repo_root + / "datasets/select/standard_mapping.yml", + "set_recently_viewed": False, + "enable_rollback": False, + }, + ) + + task() + + result = task.return_values + + assert ( + str(result) + == "{'step_results': {'Account': {'sobject': 'Account', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 5, 'total_row_errors': 0}, 'Contact': {'sobject': 'Contact', 'record_type': None, 'status': <DataOperationStatus.SUCCESS: 'Success'>, 'job_errors': [], 'records_processed': 3, 'total_row_errors': 0}}}" + ) diff --git a/datasets/select/random_mapping.yml b/datasets/select/random_mapping.yml new file mode 100644 index 0000000000..64e2e05368 --- /dev/null +++ b/datasets/select/random_mapping.yml @@ -0,0 +1,22 @@ +Account: + sf_object: Account + api: rest + fields: + - Name + - Description + - Phone + - AccountNumber + action: select + select_options: + strategy: random + # threshold: 0.3 + +Contact: + sf_object: Contact + api: rest + fields: + - LastName + - Email + lookups: + AccountId: + table: Account diff --git a/datasets/select/random_sample.sql b/datasets/select/random_sample.sql new file mode 100644 index 0000000000..f3a6d88f2c --- /dev/null +++ 
b/datasets/select/random_sample.sql @@ -0,0 +1,49 @@ +BEGIN TRANSACTION; +CREATE TABLE "Account" ( + id VARCHAR(255) NOT NULL, + "Name" VARCHAR(255), + "Description" VARCHAR(255), + "NumberOfEmployees" VARCHAR(255), + "BillingStreet" VARCHAR(255), + "BillingCity" VARCHAR(255), + "BillingState" VARCHAR(255), + "BillingPostalCode" VARCHAR(255), + "BillingCountry" VARCHAR(255), + "ShippingStreet" VARCHAR(255), + "ShippingCity" VARCHAR(255), + "ShippingState" VARCHAR(255), + "ShippingPostalCode" VARCHAR(255), + "ShippingCountry" VARCHAR(255), + "Phone" VARCHAR(255), + "Fax" VARCHAR(255), + "Website" VARCHAR(255), + "AccountNumber" VARCHAR(255), + "ParentId" VARCHAR(255), + PRIMARY KEY (id) +); + +INSERT INTO "Account" VALUES('Account-1','Tom Cruise','Some Description','','','','','','','','','','','','12345632','','','123',''); +INSERT INTO "Account" VALUES('Account-2','Bob The Builder','Some Description','','','','','','','','','','','','12345632','','','123','Account-1'); +INSERT INTO "Account" VALUES('Account-3','Shah Rukh Khan','Bollywood actor','','','','','','','','','','','','12345612','','','123','Account-1'); +INSERT INTO "Account" VALUES('Account-4','Aamir Khan','Mr perfectionist, bollywood actor','','','','','','','','','','','','12345623','','','123','Account-1'); +INSERT INTO "Account" VALUES('Account-5','Salman Khan','Mr perfectionist, bollywood actor','','','','','','','','','','','','12345623','','','123','Account-1'); + + +CREATE TABLE "Contact" ( + id VARCHAR(255) NOT NULL, + "FirstName" VARCHAR(255), + "LastName" VARCHAR(255), + "Salutation" VARCHAR(255), + "Email" VARCHAR(255), + "Phone" VARCHAR(255), + "MobilePhone" VARCHAR(255), + "Title" VARCHAR(255), + "Birthdate" VARCHAR(255), + "AccountId" VARCHAR(255), + PRIMARY KEY (id) +); + + +INSERT INTO "Contact" VALUES('Contact-1','Mr','Contact of Tom Cruise','','','','','','','Account-1'); +INSERT INTO "Contact" VALUES('Contact-2','Test','Contact of Bob the Builder','','','','','','','Account-2'); +INSERT INTO "Contact" VALUES('Contact-3','Another','Contact of SRK','','','','','','','Account-3'); diff --git a/datasets/select/similarity_annoy_sample.sql b/datasets/select/similarity_annoy_sample.sql new file mode 100644 index 0000000000..60412cbd27 --- /dev/null +++ b/datasets/select/similarity_annoy_sample.sql @@ -0,0 +1,161 @@ + +BEGIN TRANSACTION; + +CREATE TABLE "Account" ( + id VARCHAR(255) NOT NULL, + "Name" VARCHAR(255), + "Description" VARCHAR(255), + "NumberOfEmployees" VARCHAR(255), + "BillingStreet" VARCHAR(255), + "BillingCity" VARCHAR(255), + "BillingState" VARCHAR(255), + "BillingPostalCode" VARCHAR(255), + "BillingCountry" VARCHAR(255), + "ShippingStreet" VARCHAR(255), + "ShippingCity" VARCHAR(255), + "ShippingState" VARCHAR(255), + "ShippingPostalCode" VARCHAR(255), + "ShippingCountry" VARCHAR(255), + "Phone" VARCHAR(255), + "Fax" VARCHAR(255), + "Website" VARCHAR(255), + "AccountNumber" VARCHAR(255), + "ParentId" VARCHAR(255), + PRIMARY KEY (id) +); + +CREATE TABLE "Contact" ( + id VARCHAR(255) NOT NULL, + "FirstName" VARCHAR(255), + "LastName" VARCHAR(255), + "Salutation" VARCHAR(255), + "Email" VARCHAR(255), + "Phone" VARCHAR(255), + "MobilePhone" VARCHAR(255), + "Title" VARCHAR(255), + "Birthdate" VARCHAR(255), + "AccountId" VARCHAR(255), + PRIMARY KEY (id) +); + +-- Insert Account records +INSERT INTO "Account" VALUES('Account-6','Tucker, Roberts and Young','Future-proofed bi-directional encryption','','','','','','','','','','','','715.903.8280x689','','','69532987',''); +INSERT INTO "Account" 
VALUES('Account-7','Richardson, Jones and Chen','Up-sized radical function','','','','','','','','','','','','368-764-8992','','','60964432',''); +INSERT INTO "Account" VALUES('Account-8','Perkins, Johnson and Schroeder','Ameliorated 24/7 analyzer','','','','','','','','','','','','(678)277-9800x041','','','92641585',''); +INSERT INTO "Account" VALUES('Account-9','Davis-Hernandez','Horizontal well-modulated secured line','','','','','','','','','','','','399-965-8911x480','','','41592661',''); +INSERT INTO "Account" VALUES('Account-10','Acevedo-Lawson','Reverse-engineered 3rdgeneration approach','','','','','','','','','','','','3065087056','','','65959628',''); +INSERT INTO "Account" VALUES('Account-11','Harris-Schroeder','Digitized tangible forecast','','','','','','','','','','','','5643586493','','','36154428',''); +INSERT INTO "Account" VALUES('Account-12','Anderson LLC','Total multi-state throughput','','','','','','','','','','','','122.508.0623x6277','','','58454581',''); +INSERT INTO "Account" VALUES('Account-13','Padilla-Sullivan','Universal disintermediate concept','','','','','','','','','','','','001-882-804-5645','','','47157364',''); +INSERT INTO "Account" VALUES('Account-14','Owens Group','Configurable high-level portal','','','','','','','','','','','','(053)422-8886x706','','','52014732',''); +INSERT INTO "Account" VALUES('Account-15','Ferguson-Ford','Robust upward-trending moratorium','','','','','','','','','','','','001-873-516-2659x5043','','','89903758',''); +INSERT INTO "Account" VALUES('Account-16','Chambers-Nelson','Operative multimedia Graphic Interface','','','','','','','','','','','','106.252.9110','','','6017392','Account-2'); +INSERT INTO "Account" VALUES('Account-17','Davidson, Johnson and Wilson','Persistent 4thgeneration archive','','','','','','','','','','','','(148)041-7089x1879','','','7645239','Account-3'); +INSERT INTO "Account" VALUES('Account-18','Smith-Lee','Diverse disintermediate benchmark','','','','','','','','','','','','+1-860-127-9836x780','','','39175426','Account-5'); +INSERT INTO "Account" VALUES('Account-19','Silva, Avila and Adkins','Future-proofed background policy','','','','','','','','','','','','001-713-221-4818x3867','','','3357478',''); +INSERT INTO "Account" VALUES('Account-20','Anderson, Harrington and Norton','User-friendly systematic functionalities','','','','','','','','','','','','466.416.8129','','','50482193',''); +INSERT INTO "Account" VALUES('Account-21','Bell-Armstrong','Operative human-resource info-mediaries','','','','','','','','','','','','572-522-6700x04982','','','59786157',''); +INSERT INTO "Account" VALUES('Account-22','Camacho, Rose and Dixon','Persevering optimizing paradigm','','','','','','','','','','','','211.758.9395x3663','','','46438696',''); +INSERT INTO "Account" VALUES('Account-23','Jackson PLC','Optional system-worthy array','','','','','','','','','','','','374.939.1227x406','','','40071429',''); +INSERT INTO "Account" VALUES('Account-24','Montoya, Wells and Daniels','Adaptive system-worthy installation','','','','','','','','','','','','219.441.4029','','','11912061',''); +INSERT INTO "Account" VALUES('Account-25','Maldonado, Jones and Moore','Synergized responsive matrix','','','','','','','','','','','','2531039427','','','42328240',''); + + +INSERT INTO "Contact" VALUES('Contact-4','Jacqueline','Brown','Mr.','christopher85@yahoo.com','(951)175-2430x21575','1256216622','Doctor, general practice','1967-05-08','Account-7'); +INSERT INTO "Contact" 
VALUES('Contact-5','Robert','Smith','Ms.','bwilkerson@hunt.org','(954)436-8286x4149','(838)557-1881','Research scientist (life sciences)','1993-06-29','Account-20'); +INSERT INTO "Contact" VALUES('Contact-6','Hannah','Duncan','Mr.','williamyoung@suarez.com','118-444-0564','360.346.8024','Nurse, adult','1981-07-26','Account-17'); +INSERT INTO "Contact" VALUES('Contact-7','Caitlin','Le','Mx.','jasonryan@foster-johnson.com','+1-889-214-3418x10487','906.009.5203','Designer, furniture','1974-02-26','Account-5'); +INSERT INTO "Contact" VALUES('Contact-8','Matthew','Fisher','Mr.','charlesjackson@gmail.com','712-329-6696x327','398.459.0661x7802','Chief Executive Officer','1966-08-01','Account-18'); +INSERT INTO "Contact" VALUES('Contact-9','Glenda','Kline','Mx.','olee@hotmail.com','+1-810-820-7245x408','550-664-6651x44430','Chiropractor','1981-04-29','Account-5'); +INSERT INTO "Contact" VALUES('Contact-10','Joyce','Anderson','Mr.','brownchristina@gmail.com','502.563.7470','864-440-8796x404','Bonds trader','1978-01-14','Account-15'); +INSERT INTO "Contact" VALUES('Contact-11','Leslie','Bennett','Mx.','doliver@dickerson.com','001-012-634-9713x009','001-110-127-0838x1754','Materials engineer','1998-05-29','Account-11'); +INSERT INTO "Contact" VALUES('Contact-12','Steven','Butler','Mx.','tgibson@yahoo.com','982.806.0149x61369','500.073.7758x029','Clinical scientist, histocompatibility and immunogenetics','1964-02-08','Account-11'); +INSERT INTO "Contact" VALUES('Contact-13','Tami','Thompson','Mx.','amber76@beck.com','(134)982-1925','8011741195','Runner, broadcasting/film/video','1997-01-20','Account-5'); +INSERT INTO "Contact" VALUES('Contact-14','Whitney','Fowler','Mx.','calvin49@black-wilson.com','5321396442','+1-739-600-7853x706','Communications engineer','1967-03-13','Account-9'); +INSERT INTO "Contact" VALUES('Contact-15','Joe','Rodriguez','Ind.','deborahstokes@yang.net','+1-797-865-4753x485','9915119043','Surveyor, minerals','1970-11-03','Account-1'); +INSERT INTO "Contact" VALUES('Contact-16','Carrie','Velasquez','Dr.','kelsey77@campbell.com','(329)100-2219x4869','(662)137-8951x4099','Chartered accountant','1967-12-08','Account-8'); +INSERT INTO "Contact" VALUES('Contact-17','Daniel','Gonzalez','Mrs.','ozimmerman@ward.com','494-844-9534x51762','282.514.7235x0060','Adult guidance worker','1980-04-07','Account-16'); +INSERT INTO "Contact" VALUES('Contact-18','Christian','Anderson','Mr.','bryanhiggins@johnson.net','001-768-964-7201x37163','+1-311-941-9226x545','Engineer, communications','1996-02-20','Account-19'); +INSERT INTO "Contact" VALUES('Contact-19','John','Reed','Mrs.','bradyrebecca@hudson-kelly.net','506.674.3181x77646','(193)410-6407x3228','Medical technical officer','1990-07-01','Account-15'); +INSERT INTO "Contact" VALUES('Contact-20','Amy','Smith','Dr.','robert58@quinn.com','001-471-180-1138x505','385-091-1669','Cabin crew','1972-07-08','Account-12'); +INSERT INTO "Contact" VALUES('Contact-21','Autumn','Murillo','Mr.','escobarjoshua@hotmail.com','0065834869','(033)013-5568x40028','Chartered legal executive (England and Wales)','1981-02-23','Account-6'); +INSERT INTO "Contact" VALUES('Contact-22','Cody','Hernandez','Misc.','malonejonathon@griffin-osborn.com','264-563-2199','908.076.5654x36421','Geologist, engineering','1988-06-03','Account-16'); +INSERT INTO "Contact" VALUES('Contact-23','Tyler','Bowers','Dr.','mark64@schultz-parker.net','+1-379-918-6249x6802','989-539-8926x76535','Sport and exercise psychologist','1982-12-02','Account-9'); +INSERT INTO "Contact" 
VALUES('Contact-24','Diana','Ryan','Mr.','kkidd@hotmail.com','356-330-4972x9013','(042)287-4061','Electrical engineer','1976-10-16','Account-11'); +INSERT INTO "Contact" VALUES('Contact-25','Jose','Novak','Miss','scurtis@hotmail.com','264-848-6378','2925896479','Geologist, engineering','1976-10-04','Account-12'); +INSERT INTO "Contact" VALUES('Contact-26','Maria','Weeks','Mr.','collinsjeffrey@olson.org','852.269.5714x2190','388.255.0264','Chartered legal executive (England and Wales)','1983-03-27','Account-13'); +INSERT INTO "Contact" VALUES('Contact-27','Christian','Boyd','Mx.','lgrant@yahoo.com','723.439.3183x41413','(483)287-2534x701','Production assistant, television','1979-12-17','Account-5'); +INSERT INTO "Contact" VALUES('Contact-28','Heidi','Huffman','Mr.','marcus29@franklin.com','001-397-797-9946x64647','815.662.3992x42610','Hospital doctor','1995-12-11','Account-16'); +INSERT INTO "Contact" VALUES('Contact-29','Lisa','Peck','Mr.','dbradford@christensen.info','(174)433-5387x4278','+1-817-361-3752x5011','Land/geomatics surveyor','1981-11-12','Account-10'); +INSERT INTO "Contact" VALUES('Contact-30','James','Evans','Dr.','kennedykim@foster.com','(836)722-6575x49179','(237)846-8347x4073','Musician','1980-01-26','Account-3'); +INSERT INTO "Contact" VALUES('Contact-31','Karen','Reilly','Mr.','paulterrell@hotmail.com','(809)024-0484x252','(662)455-4993x582','Biochemist, clinical','1955-03-07','Account-19'); +INSERT INTO "Contact" VALUES('Contact-32','Daniel','Gonzales','Mrs.','kent84@hotmail.com','907-045-5503x44414','859-011-9999','Community pharmacist','1979-03-18','Account-1'); +INSERT INTO "Contact" VALUES('Contact-33','Debbie','Davis','Mx.','audrey99@hotmail.com','+1-527-014-2246','+1-306-189-2702x4777','Armed forces operational officer','1981-09-15','Account-11'); +INSERT INTO "Contact" VALUES('Contact-34','Heidi','Smith','Mx.','jenniferpugh@gmail.com','8265960475','+1-291-670-9597x70096','Site engineer','1979-08-31','Account-3'); +INSERT INTO "Contact" VALUES('Contact-35','David','Huff','Dr.','warneremma@hotmail.com','001-108-842-7600','572-083-2511','Aeronautical engineer','2001-09-09','Account-17'); +INSERT INTO "Contact" VALUES('Contact-36','Anthony','Thompson','Mr.','johnrandolph@hotmail.com','(722)319-8352x19507','(984)646-1878x893','Energy engineer','1987-04-12','Account-16'); +INSERT INTO "Contact" VALUES('Contact-37','Brianna','Flores','Mr.','guerrerojohn@hotmail.com','(040)934-1423x458','005.356.8723','Journalist, newspaper','1962-03-14','Account-8'); +INSERT INTO "Contact" VALUES('Contact-38','Nathan','Alexander','Dr.','mccoylarry@duncan.info','200-374-4142x7395','(821)164-0381x13162','Scientist, clinical (histocompatibility and immunogenetics)','1991-08-11','Account-3'); +INSERT INTO "Contact" VALUES('Contact-39','Patty','Savage','Mx.','sandersstephen@hotmail.com','(112)877-0657x2996','001-407-135-9742x6586','Education officer, environmental','1988-09-07','Account-5'); +INSERT INTO "Contact" VALUES('Contact-40','Timothy','Hendrix','Mr.','jamesthomas@melendez.org','(930)747-5122x43545','326-032-4776','Maintenance engineer','1980-12-31','Account-13'); +INSERT INTO "Contact" VALUES('Contact-41','Mathew','Welch','Dr.','brandypatterson@mitchell.com','(372)821-0121','394-174-6163x14401','Doctor, hospital','1985-07-22','Account-9'); +INSERT INTO "Contact" VALUES('Contact-42','Rebecca','Lopez','Mr.','geoffrey12@haynes.com','(739)509-2550x56354','027-930-6580x6108','Engineer, building services','1976-07-15','Account-12'); +INSERT INTO "Contact" 
VALUES('Contact-43','Juan','Martinez','Mr.','rmyers@foster.com','410-301-1405','+1-679-823-1570','Economist','1991-08-15','Account-3'); +INSERT INTO "Contact" VALUES('Contact-44','Kimberly','Anderson','Mr.','brush@reid-allen.org','039.174.2088x15156','+1-926-533-8571x9711','Applications developer','2002-06-07','Account-19'); +INSERT INTO "Contact" VALUES('Contact-45','Steven','Johnson','Mx.','kristenlove@graham.com','323-478-6250x512','(913)638-0634x71085','Plant breeder/geneticist','1985-08-17','Account-13'); +INSERT INTO "Contact" VALUES('Contact-46','Diane','Castro','Mr.','jenniferespinoza@yahoo.com','(888)427-7854x17261','(343)337-0016x24802','Counsellor','2001-02-11','Account-2'); +INSERT INTO "Contact" VALUES('Contact-47','Kevin','Johnson','Mr.','lejuan@smith.com','(256)300-0666x3076','001-862-940-5100','Psychologist, clinical','1987-06-04','Account-20'); +INSERT INTO "Contact" VALUES('Contact-48','Amanda','Davis','Dr.','wileymary@yahoo.com','(480)208-9142','653.024.9216x56380','International aid/development worker','1977-08-22','Account-18'); +INSERT INTO "Contact" VALUES('Contact-49','Maria','Jimenez','Mr.','ljones@maldonado-hicks.org','+1-508-122-8616','7616362966','Accountant, chartered certified','1966-01-22','Account-13'); +INSERT INTO "Contact" VALUES('Contact-50','Patrick','Mccoy','Mrs.','mariajoseph@hotmail.com','(659)725-4524','962.156.1663','Catering manager','1961-01-01','Account-16'); +INSERT INTO "Contact" VALUES('Contact-51','Kristen','Suarez','Mx.','christina51@gmail.com','001-411-577-2094x758','889-250-7752','Chartered legal executive (England and Wales)','1994-06-23','Account-15'); +INSERT INTO "Contact" VALUES('Contact-52','Debbie','Alvarez','Mx.','tammymedina@hotmail.com','001-086-686-9414x15115','012-043-1931','Loss adjuster, chartered','1964-11-08','Account-13'); +INSERT INTO "Contact" VALUES('Contact-53','Traci','Banks','Dr.','tiffany64@gmail.com','308-249-7490','+1-583-349-6177x858','Librarian, academic','1967-02-27','Account-15'); +INSERT INTO "Contact" VALUES('Contact-54','Eric','Johnson','Mx.','aharris@cunningham.com','(102)107-1088','001-821-976-9439x923','Teacher, primary school','1992-07-09','Account-6'); +INSERT INTO "Contact" VALUES('Contact-55','Shawn','Diaz','Dr.','hochoa@martin.com','2335286273','+1-138-151-5601x23752','Sub','1962-09-10','Account-7'); +INSERT INTO "Contact" VALUES('Contact-56','Cynthia','Carroll','Dr.','jonathan75@espinoza.com','909-941-2179x15747','972-747-7021x87437','Volunteer coordinator','2001-04-26','Account-15'); +INSERT INTO "Contact" VALUES('Contact-57','Derek','English','Mr.','figueroalinda@larson.com','771-805-6663x3500','(929)813-8603x896','Acupuncturist','1956-03-02','Account-4'); +INSERT INTO "Contact" VALUES('Contact-58','Dean','Ortiz','Mr.','jonathan33@yahoo.com','286.477.8501x77097','335.033.8461x92224','Podiatrist','1969-01-28','Account-1'); +INSERT INTO "Contact" VALUES('Contact-59','Thomas','Watson','Mrs.','wrobertson@adams.com','+1-044-359-5440x3220','5242854984','Visual merchandiser','1977-05-17','Account-1'); +INSERT INTO "Contact" VALUES('Contact-60','Lynn','Frey','Mrs.','olivia38@schaefer.com','001-876-374-1841x70622','158.527.9951x1108','Operational researcher','1965-07-01','Account-3'); +INSERT INTO "Contact" VALUES('Contact-61','Jonathan','Steele','Dr.','brandon28@fields.com','001-645-936-4973x340','686.831.0030','Quarry manager','1972-06-09','Account-19'); +INSERT INTO "Contact" VALUES('Contact-62','Teresa','Williams','Dr.','yhood@cooper.com','(600)862-5939x599','001-262-786-9797','Equality and diversity 
officer','1967-07-10','Account-1'); +INSERT INTO "Contact" VALUES('Contact-63','Sandra','Henderson','Ms.','smithmichael@yahoo.com','001-059-111-8601x187','5057044225','Logistics and distribution manager','1961-11-07','Account-9'); +INSERT INTO "Contact" VALUES('Contact-64','Darrell','Stone','Mrs.','thomasmichelle@woods-tyler.com','(575)527-9862x16794','075.950.5314','Radiation protection practitioner','2003-07-07','Account-12'); +INSERT INTO "Contact" VALUES('Contact-65','Christopher','Stephens','Dr.','katrina23@gmail.com','(184)173-5357x5740','960.937.4682','Designer, fashion/clothing','1967-12-14','Account-4'); +INSERT INTO "Contact" VALUES('Contact-66','Jonathan','Sanders','Mr.','walkerethan@gmail.com','+1-288-991-4519x454','001-013-648-7553','Scientist, forensic','1970-05-04','Account-3'); +INSERT INTO "Contact" VALUES('Contact-67','Debra','Rodriguez','Ind.','sampsonamy@gmail.com','+1-570-020-1500x07002','800-841-6902x384','Administrator','1971-11-25','Account-8'); +INSERT INTO "Contact" VALUES('Contact-68','Barbara','Bates','Mx.','watsonbrandon@carpenter.org','(125)608-9445x280','001-352-204-9634x767','Aid worker','1974-12-24','Account-8'); +INSERT INTO "Contact" VALUES('Contact-69','Jerry','Davis','Dr.','umcfarland@hotmail.com','(944)188-4914','271.688.9384','Dietitian','1997-09-11','Account-6'); +INSERT INTO "Contact" VALUES('Contact-70','Eric','Turner','Dr.','kimberly51@massey-taylor.com','175.696.6542','+1-178-116-3595x475','Orthoptist','1959-12-03','Account-17'); +INSERT INTO "Contact" VALUES('Contact-71','Joanna','Benton','Dr.','lnash@hotmail.com','838.192.6818','020.272.6352','Therapist, horticultural','1986-04-09','Account-13'); +INSERT INTO "Contact" VALUES('Contact-72','Christopher','Stevens','Dr.','simpsonbilly@hotmail.com','038.162.8486x906','309-250-0812x3139','Clinical psychologist','1954-08-11','Account-18'); +INSERT INTO "Contact" VALUES('Contact-73','Erin','Barron','Mr.','robertjarvis@reed-johnson.com','7606999523','6153409570','Risk manager','1968-10-18','Account-19'); +INSERT INTO "Contact" VALUES('Contact-74','Wayne','Shelton','Dr.','leslie07@hotmail.com','(745)348-2609x0182','122-476-1588x59819','Dance movement psychotherapist','1975-05-27','Account-12'); +INSERT INTO "Contact" VALUES('Contact-75','Jessica','Hardy','Mx.','krollins@gmail.com','507.507.0232x57702','703.252.9694x28556','Surveyor, land/geomatics','1988-10-08','Account-10'); +INSERT INTO "Contact" VALUES('Contact-76','Ashley','Robinson','Miss','kimberly63@gmail.com','+1-033-702-4232x7829','001-029-710-7322','Sports coach','1970-10-15','Account-15'); +INSERT INTO "Contact" VALUES('Contact-77','Christina','Brooks','Dr.','ltaylor@hughes.info','(721)750-8969','(958)358-6059','Investment banker, corporate','1976-08-04','Account-16'); +INSERT INTO "Contact" VALUES('Contact-78','Anna','Glass','Mr.','ocardenas@hampton.com','(646)907-5188x343','314-776-3643x168','Chief Financial Officer','1958-01-02','Account-1'); +INSERT INTO "Contact" VALUES('Contact-79','Kimberly','Navarro','Ms.','adamslinda@smith.biz','001-914-318-0025x483','(484)635-0527x97649','Health promotion specialist','1958-03-09','Account-8'); +INSERT INTO "Contact" VALUES('Contact-80','Zachary','Hale','Mx.','jaredchristian@rogers.com','412-286-3270','001-749-000-1081x6632','Aeronautical engineer','1966-03-04','Account-16'); +INSERT INTO "Contact" VALUES('Contact-81','Jeffrey','Patterson','Mrs.','michael14@gmail.com','+1-971-161-3494x40567','322.869.0877x4269','Chartered loss adjuster','1997-12-30','Account-12'); +INSERT INTO "Contact" 
VALUES('Contact-82','Ashlee','Douglas','Dr.','ljohnson@yahoo.com','(107)073-2864x709','+1-543-955-1348x27165','Media buyer','1975-11-24','Account-1'); +INSERT INTO "Contact" VALUES('Contact-83','Amy','Jackson','Ms.','jbrown@yahoo.com','(656)107-4242','(300)274-8183x877','Radiographer, therapeutic','1977-05-04','Account-20'); +INSERT INTO "Contact" VALUES('Contact-84','Austin','Arnold','Mrs.','aevans@thompson.org','702.321.9550x57620','563.499.1591','Customer service manager','1965-04-23','Account-5'); +INSERT INTO "Contact" VALUES('Contact-85','Lisa','Hahn','Mrs.','pamelathomas@davis-mills.com','563.647.1985','516-184-8784x18409','Psychotherapist','1982-03-11','Account-9'); +INSERT INTO "Contact" VALUES('Contact-86','Michael','Rice','Mrs.','wwalker@gmail.com','136-191-2472','(012)569-2985x7448','Education administrator','1964-07-14','Account-7'); +INSERT INTO "Contact" VALUES('Contact-87','Judith','Ross','Mrs.','karen84@cook.com','001-896-116-2678','+1-591-808-0731x50857','Comptroller','1962-09-04','Account-7'); +INSERT INTO "Contact" VALUES('Contact-88','Debbie','Hooper','Ind.','james11@davis.com','319-281-3272x823','(066)287-5057x484','Probation officer','1965-09-09','Account-7'); +INSERT INTO "Contact" VALUES('Contact-89','Luis','Smith','Mrs.','yallen@becker-hunt.net','001-427-071-0883x18715','+1-776-803-7761','Manufacturing engineer','1990-07-14','Account-14'); +INSERT INTO "Contact" VALUES('Contact-90','Peter','Anderson','Dr.','allentyler@guzman.org','+1-393-254-9105x178','676.588.9551x635','Administrator, Civil Service','1967-07-06','Account-12'); +INSERT INTO "Contact" VALUES('Contact-91','John','Ward','Mr.','karlamarquez@orr.com','551.753.6658x8830','001-235-880-4273x489','Contracting civil engineer','1966-07-07','Account-17'); +INSERT INTO "Contact" VALUES('Contact-92','Susan','Colon','Mrs.','jerrywalker@knox.com','2886198694','139-647-8366x467','Warden/ranger','1961-08-17','Account-12'); +INSERT INTO "Contact" VALUES('Contact-93','Julie','Higgins','Dr.','curtis88@hotmail.com','856.472.4550','(436)489-2153','Passenger transport manager','1962-11-18','Account-4'); +INSERT INTO "Contact" VALUES('Contact-94','Allen','Robinson','Dr.','alexander94@ortega.com','901.887.7671x4722','7628475086','Biochemist, clinical','1980-06-29','Account-1'); +INSERT INTO "Contact" VALUES('Contact-95','Nathan','Yoder','Misc.','watsonmichael@wilson-benson.com','(799)922-5588','(943)647-6987x45290','Paramedic','1961-09-02','Account-3'); +INSERT INTO "Contact" VALUES('Contact-96','Sheryl','Mckee','Mr.','michael29@gmail.com','2373626803','4779103743','Furniture designer','1960-03-18','Account-3'); +INSERT INTO "Contact" VALUES('Contact-97','Melissa','Browning','Mrs.','daniel78@burns.org','001-910-900-7974','(293)760-7748','Quantity surveyor','1956-04-28','Account-20'); +INSERT INTO "Contact" VALUES('Contact-98','Elizabeth','Preston','Mrs.','roberttaylor@gmail.com','+1-367-895-8706x8070','001-083-228-6710x5234','Media buyer','1993-06-02','Account-6'); +INSERT INTO "Contact" VALUES('Contact-99','Karen','Goodwin','Mr.','stephen15@barber-perkins.com','640-922-2069x071','001-340-296-7013x02254','Therapist, art','1979-01-20','Account-9'); +INSERT INTO "Contact" VALUES('Contact-100','Chase','Wilson','Dr.','mdonaldson@gmail.com','(835)291-0076x88366','8748248647','Therapist, sports','1994-10-06','Account-17'); +COMMIT; \ No newline at end of file diff --git a/datasets/select/similarity_mapping.yml b/datasets/select/similarity_mapping.yml new file mode 100644 index 0000000000..d7b99339ca --- /dev/null +++ 
b/datasets/select/similarity_mapping.yml @@ -0,0 +1,22 @@ +Account: + sf_object: Account + api: rest + fields: + - Name + - Description + - Phone + - AccountNumber + action: select + select_options: + strategy: similarity + # threshold: 0.3 + +Contact: + sf_object: Contact + api: rest + fields: + - LastName + - Email + lookups: + AccountId: + table: Account diff --git a/datasets/select/similarity_sample.sql b/datasets/select/similarity_sample.sql new file mode 100644 index 0000000000..f3a6d88f2c --- /dev/null +++ b/datasets/select/similarity_sample.sql @@ -0,0 +1,49 @@ +BEGIN TRANSACTION; +CREATE TABLE "Account" ( + id VARCHAR(255) NOT NULL, + "Name" VARCHAR(255), + "Description" VARCHAR(255), + "NumberOfEmployees" VARCHAR(255), + "BillingStreet" VARCHAR(255), + "BillingCity" VARCHAR(255), + "BillingState" VARCHAR(255), + "BillingPostalCode" VARCHAR(255), + "BillingCountry" VARCHAR(255), + "ShippingStreet" VARCHAR(255), + "ShippingCity" VARCHAR(255), + "ShippingState" VARCHAR(255), + "ShippingPostalCode" VARCHAR(255), + "ShippingCountry" VARCHAR(255), + "Phone" VARCHAR(255), + "Fax" VARCHAR(255), + "Website" VARCHAR(255), + "AccountNumber" VARCHAR(255), + "ParentId" VARCHAR(255), + PRIMARY KEY (id) +); + +INSERT INTO "Account" VALUES('Account-1','Tom Cruise','Some Description','','','','','','','','','','','','12345632','','','123',''); +INSERT INTO "Account" VALUES('Account-2','Bob The Builder','Some Description','','','','','','','','','','','','12345632','','','123','Account-1'); +INSERT INTO "Account" VALUES('Account-3','Shah Rukh Khan','Bollywood actor','','','','','','','','','','','','12345612','','','123','Account-1'); +INSERT INTO "Account" VALUES('Account-4','Aamir Khan','Mr perfectionist, bollywood actor','','','','','','','','','','','','12345623','','','123','Account-1'); +INSERT INTO "Account" VALUES('Account-5','Salman Khan','Mr perfectionist, bollywood actor','','','','','','','','','','','','12345623','','','123','Account-1'); + + +CREATE TABLE "Contact" ( + id VARCHAR(255) NOT NULL, + "FirstName" VARCHAR(255), + "LastName" VARCHAR(255), + "Salutation" VARCHAR(255), + "Email" VARCHAR(255), + "Phone" VARCHAR(255), + "MobilePhone" VARCHAR(255), + "Title" VARCHAR(255), + "Birthdate" VARCHAR(255), + "AccountId" VARCHAR(255), + PRIMARY KEY (id) +); + + +INSERT INTO "Contact" VALUES('Contact-1','Mr','Contact of Tom Cruise','','','','','','','Account-1'); +INSERT INTO "Contact" VALUES('Contact-2','Test','Contact of Bob the Builder','','','','','','','Account-2'); +INSERT INTO "Contact" VALUES('Contact-3','Another','Contact of SRK','','','','','','','Account-3'); diff --git a/datasets/select/similarity_select_insert_mapping.yml b/datasets/select/similarity_select_insert_mapping.yml new file mode 100644 index 0000000000..268bb9bda3 --- /dev/null +++ b/datasets/select/similarity_select_insert_mapping.yml @@ -0,0 +1,47 @@ +Account: + sf_object: Account + api: rest + fields: + - Name + - Description + - Phone + - AccountNumber + lookups: + ParentId: + table: Account + +Contact: + sf_object: Contact + api: rest + fields: + - LastName + - Email + lookups: + AccountId: + table: Account + +Lead: + sf_object: Lead + api: rest + fields: + - LastName + - Company + +Event: + sf_object: Event + api: rest + action: select + select_options: + strategy: similarity + threshold: 0.1 + fields: + - Subject + - DurationInMinutes + - ActivityDateTime + lookups: + WhoId: + table: + - Contact + - Lead + WhatId: + table: Account diff --git a/datasets/select/similarity_select_insert_mapping_bulk.yml 
b/datasets/select/similarity_select_insert_mapping_bulk.yml new file mode 100644 index 0000000000..011ad57af9 --- /dev/null +++ b/datasets/select/similarity_select_insert_mapping_bulk.yml @@ -0,0 +1,47 @@ +Account: + sf_object: Account + api: rest + fields: + - Name + - Description + - Phone + - AccountNumber + lookups: + ParentId: + table: Account + +Contact: + sf_object: Contact + api: rest + fields: + - LastName + - Email + lookups: + AccountId: + table: Account + +Lead: + sf_object: Lead + api: rest + fields: + - LastName + - Company + +Event: + sf_object: Event + api: bulk + action: select + select_options: + strategy: similarity + threshold: 0.1 + fields: + - Subject + - DurationInMinutes + - ActivityDateTime + lookups: + WhoId: + table: + - Contact + - Lead + WhatId: + table: Account diff --git a/datasets/select/similarity_select_insert_sample.sql b/datasets/select/similarity_select_insert_sample.sql new file mode 100644 index 0000000000..0c62a5b870 --- /dev/null +++ b/datasets/select/similarity_select_insert_sample.sql @@ -0,0 +1,62 @@ +BEGIN TRANSACTION; +CREATE TABLE "Account" ( + id VARCHAR(255) NOT NULL, + "Name" VARCHAR(255), + "Description" VARCHAR(255), + "NumberOfEmployees" VARCHAR(255), + "BillingStreet" VARCHAR(255), + "BillingCity" VARCHAR(255), + "BillingState" VARCHAR(255), + "BillingPostalCode" VARCHAR(255), + "BillingCountry" VARCHAR(255), + "ShippingStreet" VARCHAR(255), + "ShippingCity" VARCHAR(255), + "ShippingState" VARCHAR(255), + "ShippingPostalCode" VARCHAR(255), + "ShippingCountry" VARCHAR(255), + "Phone" VARCHAR(255), + "Fax" VARCHAR(255), + "Website" VARCHAR(255), + "AccountNumber" VARCHAR(255), + "ParentId" VARCHAR(255), + PRIMARY KEY (id) +); +INSERT INTO "Account" VALUES('Account-1','Tom Cruise','Some Description','','','','','','','','','','','','123456','','','123',''); +INSERT INTO "Account" VALUES('Account-2','Bob The Builder','Some Description','','','','','','','','','','','','123456','','','123','Account-1'); +CREATE TABLE "Contact" ( + id VARCHAR(255) NOT NULL, + "FirstName" VARCHAR(255), + "LastName" VARCHAR(255), + "Salutation" VARCHAR(255), + "Email" VARCHAR(255), + "Phone" VARCHAR(255), + "MobilePhone" VARCHAR(255), + "Title" VARCHAR(255), + "Birthdate" VARCHAR(255), + "AccountId" VARCHAR(255), + PRIMARY KEY (id) +); +INSERT INTO "Contact" VALUES('Contact-1','Test','Contact','','','','','','','Account-1'); +INSERT INTO "Contact" VALUES('Contact-2','Test','Contact','','','','','','','Account-2'); +INSERT INTO "Contact" VALUES('Contact-3','Another','Contact','','','','','','','Account-3'); +CREATE TABLE "Lead" ( + id VARCHAR(255) NOT NULL, + "LastName" VARCHAR(255), + "Company" VARCHAR(255), + PRIMARY KEY (id) +); +INSERT INTO "Lead" VALUES('Lead-1','First Lead','Salesforce'); +INSERT INTO "Lead" VALUES('Lead-2','Second Lead','Salesforce'); +CREATE TABLE "Event" ( + id VARCHAR(255) NOT NULL, + "Subject" VARCHAR(255), + "ActivityDateTime" VARCHAR(255), + "DurationInMinutes" VARCHAR(255), + "WhoId" VARCHAR(255), + "WhatId" VARCHAR(255), + PRIMARY KEY (id) +); +INSERT INTO "Event" VALUES('Event-1','Test Event 1', '2024-11-07T07:00:00.000+0000', '60','Contact-1','Account-1'); +INSERT INTO "Event" VALUES('Event-2','Test Event 2', '2024-11-07T07:00:00.000+0000', '60','Contact-1',''); +INSERT INTO "Event" VALUES('Event-3','third record!!!!!!!!', '2024-11-07T07:00:00.000+0000', '31','Contact-2','Account-1'); +COMMIT; \ No newline at end of file diff --git a/datasets/select/standard_mapping.yml b/datasets/select/standard_mapping.yml new file mode 100644 
index 0000000000..0a96561600 --- /dev/null +++ b/datasets/select/standard_mapping.yml @@ -0,0 +1,22 @@ +Account: + sf_object: Account + api: rest + fields: + - Name + - Description + - Phone + - AccountNumber + action: select + select_options: + strategy: standard + # threshold: 0.3 + +Contact: + sf_object: Contact + api: rest + fields: + - LastName + - Email + lookups: + AccountId: + table: Account From 0cd87f39eada08a048c00a7d119b676e503e9a60 Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Wed, 27 Nov 2024 09:48:08 +0530 Subject: [PATCH 50/65] Remove unused imports from test_select.py --- cumulusci/tasks/bulkdata/tests/test_select.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/cumulusci/tasks/bulkdata/tests/test_select.py b/cumulusci/tasks/bulkdata/tests/test_select.py index 6512c829b0..1bb13a4cca 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select.py +++ b/cumulusci/tasks/bulkdata/tests/test_select.py @@ -1,14 +1,6 @@ -import re -from unittest import mock - import pytest -import responses -import yaml -from cumulusci.core.exceptions import BulkDataException from cumulusci.tasks.bulkdata import LoadData -from cumulusci.tasks.bulkdata.step import DataApi, DataOperationStatus -from cumulusci.tests.util import CURRENT_SF_API_VERSION, mock_describe_calls class TestSelect: From 2bd015e7cf70387f8dddfcb88bb06df8197c91ed Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Wed, 27 Nov 2024 11:26:17 +0530 Subject: [PATCH 51/65] Add line to mention below is select operation --- docs/data.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/data.md b/docs/data.md index 9badb404e8..fe9396a4ae 100644 --- a/docs/data.md +++ b/docs/data.md @@ -256,6 +256,8 @@ versa. The `select` functionality is designed to streamline the mapping process by enabling the selection of specific records directly from Salesforce for lookups. This feature is particularly useful when dealing with non-insertable Salesforce objects and ensures that pre-existing records are used rather than inserting new ones. The selection process is highly customizable with various strategies, filters, and additional capabilities that provide flexibility and precision in data mapping. 
+The following is an example of a `mapping.yaml` file where the `Event` sObject utilizes the `select` action: + ```yaml Account: sf_object: Account From 44e80b256d6d8eb3cacbe51763a3401472f2f7ae Mon Sep 17 00:00:00 2001 From: Jawadtp Date: Thu, 28 Nov 2024 16:03:03 +0530 Subject: [PATCH 52/65] Fix for numerical feature category bug --- cumulusci/tasks/bulkdata/select_utils.py | 43 +++++----- cumulusci/tasks/bulkdata/step.py | 4 + .../tasks/bulkdata/tests/test_select_utils.py | 83 +++++++++---------- 3 files changed, 62 insertions(+), 68 deletions(-) diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index 7412a38ae4..2d2728dadb 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -352,9 +352,6 @@ def annoy_post_process( insertion_candidates = load_shaped_records return selected_records, insertion_candidates - query_records = replace_empty_strings_with_missing(query_records) - select_shaped_records = replace_empty_strings_with_missing(select_shaped_records) - hash_features = 100 num_trees = 10 @@ -589,7 +586,7 @@ def add_limit_offset_to_user_filter( return f" {filter_clause}" -def determine_field_types(df, weights): +def determine_field_types(df_db, df_query, weights): numerical_features = [] boolean_features = [] categorical_features = [] @@ -598,23 +595,35 @@ def determine_field_types(df, weights): boolean_weights = [] categorical_weights = [] - for col, weight in zip(df.columns, weights): + for col, weight in zip(df_db.columns, weights): # Check if the column can be converted to numeric try: - # Attempt to convert to numeric - df[col] = pd.to_numeric(df[col], errors="raise") + temp_df_db = pd.to_numeric(df_db[col], errors="raise") + temp_df_query = pd.to_numeric(df_query[col], errors="raise") + # Replace empty values with 0 for numerical features + df_db[col] = temp_df_db.fillna(0).replace("", 0) + df_query[col] = temp_df_query.fillna(0).replace("", 0) numerical_features.append(col) numerical_weights.append(weight) except ValueError: # Check for boolean values - if df[col].str.lower().isin(["true", "false"]).all(): + if ( + df_db[col].str.lower().isin(["true", "false"]).all() + and df_query[col].str.lower().isin(["true", "false"]).all() + ): # Map to actual boolean values - df[col] = df[col].str.lower().map({"true": True, "false": False}) + df_db[col] = df_db[col].str.lower().map({"true": True, "false": False}) + df_query[col] = ( + df_query[col].str.lower().map({"true": True, "false": False}) + ) boolean_features.append(col) boolean_weights.append(weight) else: categorical_features.append(col) categorical_weights.append(weight) + # Replace empty values with 'missing' for categorical features + df_db[col] = df_db[col].replace("", "missing") + df_query[col] = df_query[col].replace("", "missing") return ( numerical_features, @@ -640,14 +649,7 @@ def vectorize_records(db_records, query_records, hash_features, weights): numerical_weights, boolean_weights, categorical_weights, - ) = determine_field_types(df_db, weights) - - # Modify query dataframe boolean columns to True or False - for col in df_query.columns: - if df_query[col].str.lower().isin(["true", "false"]).all(): - df_query[col] = ( - df_query[col].str.lower().map({"true": True, "false": False}) - ) + ) = determine_field_types(df_db, df_query, weights) # Fit StandardScaler on the numerical features of the database records scaler = StandardScaler() @@ -705,13 +707,6 @@ def vectorize_records(db_records, query_records, hash_features, 
weights): return final_db_vectors, final_query_vectors -def replace_empty_strings_with_missing(records): - return [ - [(field if field != "" else "missing") for field in record] - for record in records - ] - - def split_and_filter_fields(fields: T.List[str]) -> T.Tuple[T.List[str], T.List[str]]: # List to store non-lookup fields (load fields) load_fields = [] diff --git a/cumulusci/tasks/bulkdata/step.py b/cumulusci/tasks/bulkdata/step.py index b2a13bf966..9dbbe40cd7 100644 --- a/cumulusci/tasks/bulkdata/step.py +++ b/cumulusci/tasks/bulkdata/step.py @@ -478,9 +478,11 @@ def select_records(self, records): ) # Execute the main select query using Bulk API + self.logger.info("Retrieving records from org...") select_query_records = self._execute_select_query( select_query=select_query, query_fields=query_fields ) + self.logger.info(f"Retrieved {len(select_query_records)} from org") query_records.extend(select_query_records) # Post-process the query results @@ -895,7 +897,9 @@ def select_records(self, records): ) # Execute the query and gather the records + self.logger.info("Retrieving records from org...") query_records = self._execute_soql_query(select_query, query_fields) + self.logger.info(f"Retrieved {len(query_records)} from org") # Post-process the query results for this batch ( diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index a0b5a3fcad..fb77abcf9b 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -11,7 +11,6 @@ find_closest_record, levenshtein_distance, reorder_records, - replace_empty_strings_with_missing, split_and_filter_fields, vectorize_records, ) @@ -485,43 +484,9 @@ def test_calculate_levenshtein_distance_weights_length_doesnt_match(): assert "Records must be same size as fields (weights)." 
in str(e.value) -def test_replace_empty_strings_with_missing(): - # Case 1: Normal case with some empty strings - records = [ - ["Alice", "", "New York"], - ["Bob", "Engineer", ""], - ["", "Teacher", "Chicago"], - ] - expected = [ - ["Alice", "missing", "New York"], - ["Bob", "Engineer", "missing"], - ["missing", "Teacher", "Chicago"], - ] - assert replace_empty_strings_with_missing(records) == expected - - # Case 2: No empty strings, so the output should be the same as input - records = [["Alice", "Manager", "New York"], ["Bob", "Engineer", "San Francisco"]] - expected = [["Alice", "Manager", "New York"], ["Bob", "Engineer", "San Francisco"]] - assert replace_empty_strings_with_missing(records) == expected - - # Case 3: List with all empty strings - records = [["", "", ""], ["", "", ""]] - expected = [["missing", "missing", "missing"], ["missing", "missing", "missing"]] - assert replace_empty_strings_with_missing(records) == expected - - # Case 4: Empty list (should return an empty list) - records = [] - expected = [] - assert replace_empty_strings_with_missing(records) == expected - - # Case 5: List with some empty sublists - records = [[], ["Alice", ""], []] - expected = [[], ["Alice", "missing"], []] - assert replace_empty_strings_with_missing(records) == expected - - def test_all_numeric_columns(): - df = pd.DataFrame({"A": [1, 2, 3], "B": [4.5, 5.5, 6.5]}) + df_db = pd.DataFrame({"A": [1, 2, 3], "B": [4.5, 5.5, 6.5]}) + df_query = pd.DataFrame({"A": [4, 5, ""], "B": [4.5, 5.5, 6.5]}) weights = [0.1, 0.2] expected_output = ( ["A", "B"], # numerical_features @@ -531,11 +496,31 @@ def test_all_numeric_columns(): [], # boolean_weights [], # categorical_weights ) - assert determine_field_types(df, weights) == expected_output + assert determine_field_types(df_db, df_query, weights) == expected_output + + +def test_numeric_columns__one_non_numeric(): + df_db = pd.DataFrame({"A": [1, 2, 3], "B": [4.5, 5.5, 6.5]}) + df_query = pd.DataFrame({"A": [4, 5, 6], "B": ["abcd", 5.5, 6.5]}) + weights = [0.1, 0.2] + expected_output = ( + ["A"], # numerical_features + [], # boolean_features + [], # categorical_features + [0.1], # numerical_weights + [], # boolean_weights + [], # categorical_weights + ) + assert determine_field_types(df_db, df_query, weights) == expected_output def test_all_boolean_columns(): - df = pd.DataFrame({"A": ["true", "false", "true"], "B": ["false", "true", "false"]}) + df_db = pd.DataFrame( + {"A": ["true", "false", "true"], "B": ["false", "true", "false"]} + ) + df_query = pd.DataFrame( + {"A": ["true", "false", "true"], "B": ["false", "true", "false"]} + ) weights = [0.3, 0.4] expected_output = ( [], # numerical_features @@ -545,13 +530,16 @@ def test_all_boolean_columns(): [0.3, 0.4], # boolean_weights [], # categorical_weights ) - assert determine_field_types(df, weights) == expected_output + assert determine_field_types(df_db, df_query, weights) == expected_output def test_all_categorical_columns(): - df = pd.DataFrame( + df_db = pd.DataFrame( {"A": ["apple", "banana", "cherry"], "B": ["dog", "cat", "mouse"]} ) + df_query = pd.DataFrame( + {"A": ["banana", "apple", "cherry"], "B": ["cat", "dog", "mouse"]} + ) weights = [0.5, 0.6] expected_output = ( [], # numerical_features @@ -561,17 +549,24 @@ def test_all_categorical_columns(): [], # boolean_weights [0.5, 0.6], # categorical_weights ) - assert determine_field_types(df, weights) == expected_output + assert determine_field_types(df_db, df_query, weights) == expected_output def test_mixed_types(): - df = pd.DataFrame( + 
df_db = pd.DataFrame( { "A": [1, 2, 3], "B": ["true", "false", "true"], "C": ["apple", "banana", "cherry"], } ) + df_query = pd.DataFrame( + { + "A": [1, 3, ""], + "B": ["true", "true", "true"], + "C": ["apple", "", 3], + } + ) weights = [0.7, 0.8, 0.9] expected_output = ( ["A"], # numerical_features @@ -581,7 +576,7 @@ def test_mixed_types(): [0.8], # boolean_weights [0.9], # categorical_weights ) - assert determine_field_types(df, weights) == expected_output + assert determine_field_types(df_db, df_query, weights) == expected_output def test_vectorize_records_mixed_numerical_boolean_categorical(): From fee284b156f5ee15ff0b093494f0bde4ce131f4f Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Mon, 2 Dec 2024 15:27:43 +0530 Subject: [PATCH 53/65] Fix test error --- .../tasks/bulkdata/tests/test_select_utils.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index fb77abcf9b..6460f18bdc 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -485,8 +485,8 @@ def test_calculate_levenshtein_distance_weights_length_doesnt_match(): def test_all_numeric_columns(): - df_db = pd.DataFrame({"A": [1, 2, 3], "B": [4.5, 5.5, 6.5]}) - df_query = pd.DataFrame({"A": [4, 5, ""], "B": [4.5, 5.5, 6.5]}) + df_db = pd.DataFrame({"A": ["1", "2", "3"], "B": ["4.5", " 5.5", "6.5"]}) + df_query = pd.DataFrame({"A": ["4", "5", ""], "B": ["4.5", "5.5", "6.5"]}) weights = [0.1, 0.2] expected_output = ( ["A", "B"], # numerical_features @@ -500,16 +500,16 @@ def test_all_numeric_columns(): def test_numeric_columns__one_non_numeric(): - df_db = pd.DataFrame({"A": [1, 2, 3], "B": [4.5, 5.5, 6.5]}) - df_query = pd.DataFrame({"A": [4, 5, 6], "B": ["abcd", 5.5, 6.5]}) + df_db = pd.DataFrame({"A": ["1", "2", "3"], "B": ["4.5", "5.5", "6.5"]}) + df_query = pd.DataFrame({"A": ["4", "5", "6"], "B": ["abcd", "5.5", "6.5"]}) weights = [0.1, 0.2] expected_output = ( ["A"], # numerical_features [], # boolean_features - [], # categorical_features + ["B"], # categorical_features [0.1], # numerical_weights [], # boolean_weights - [], # categorical_weights + [0.2], # categorical_weights ) assert determine_field_types(df_db, df_query, weights) == expected_output @@ -555,16 +555,16 @@ def test_all_categorical_columns(): def test_mixed_types(): df_db = pd.DataFrame( { - "A": [1, 2, 3], + "A": ["1", "2", "3"], "B": ["true", "false", "true"], "C": ["apple", "banana", "cherry"], } ) df_query = pd.DataFrame( { - "A": [1, 3, ""], + "A": ["1", "3", ""], "B": ["true", "true", "true"], - "C": ["apple", "", 3], + "C": ["apple", "", "3"], } ) weights = [0.7, 0.8, 0.9] From d67fc6b44b35daec8354f71b2156ebe4c22d85af Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Tue, 3 Dec 2024 15:12:13 +0530 Subject: [PATCH 54/65] Fix issue where zero threshold was selecting everything. 
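The root cause is a Python truthiness pitfall: a configured threshold of 0 is
falsy, so a guard written as `if threshold and (distance >= threshold)` never
rejects anything and every record gets selected. A minimal, self-contained
sketch of the pitfall and the `is not None` fix (the distance values are made
up for illustration; this is not code from the diff below):

```python
# Hypothetical similarity distances; higher means a worse match.
distances = [0.0, 0.4, 0.9]
threshold = 0  # the caller wants only exact matches selected

# Buggy guard: `threshold` of 0 is falsy, so the check short-circuits,
# nothing is ever rejected, and everything gets selected.
rejected_buggy = [d for d in distances if threshold and d >= threshold]
assert rejected_buggy == []

# Fixed guard: distinguish "no threshold supplied" (None) from a real
# threshold of 0; now every distance >= 0 becomes an insert candidate.
rejected_fixed = [d for d in distances if threshold is not None and d >= threshold]
assert rejected_fixed == [0.0, 0.4, 0.9]
```
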
Added tests as well --- cumulusci/tasks/bulkdata/select_utils.py | 4 +- cumulusci/tasks/bulkdata/tests/test_step.py | 189 +++++++++++++++++++- 2 files changed, 189 insertions(+), 4 deletions(-) diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index 2d2728dadb..fedc1398bb 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -397,7 +397,7 @@ def annoy_post_process( # Retrieve the corresponding record from the database record = query_record_data[neighbor_index] closest_record_id = record_to_id_map[tuple(record)] - if threshold and (neighbor_distances[idx] >= threshold): + if threshold is not None and (neighbor_distances[idx] >= threshold): selected_records.append(None) insertion_candidates.append(load_shaped_records[i]) else: @@ -445,7 +445,7 @@ def levenshtein_post_process( select_record, target_records, similarity_weights ) - if distance_threshold and match_distance > distance_threshold: + if distance_threshold is not None and match_distance > distance_threshold: # Append load record for insertion if distance exceeds threshold insertion_candidates.append(load_record) selected_records.append(None) diff --git a/cumulusci/tasks/bulkdata/tests/test_step.py b/cumulusci/tasks/bulkdata/tests/test_step.py index e94e91f226..3887b270f3 100644 --- a/cumulusci/tasks/bulkdata/tests/test_step.py +++ b/cumulusci/tasks/bulkdata/tests/test_step.py @@ -1232,7 +1232,9 @@ def test_process_insert_records_failure(self, download_mock): ) @mock.patch("cumulusci.tasks.bulkdata.step.download_file") - def test_select_records_similarity_strategy__insert_records(self, download_mock): + def test_select_records_similarity_strategy__insert_records__non_zero_threshold( + self, download_mock + ): # Set up mock context and BulkApiDmlOperation context = mock.Mock() # Add step with threshold @@ -1325,6 +1327,102 @@ def test_select_records_similarity_strategy__insert_records(self, download_mock) == 1 ) + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") + def test_select_records_similarity_strategy__insert_records__zero_threshold( + self, download_mock + ): + # Set up mock context and BulkApiDmlOperation + context = mock.Mock() + # Add step with threshold + step = BulkApiDmlOperation( + sobject="Contact", + operation=DataOperationType.QUERY, + api_options={"batch_size": 10, "update_key": "LastName"}, + context=context, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + threshold=0, + ) + + # Mock Bulk API responses + step.bulk.endpoint = "https://test" + step.bulk.create_query_job.return_value = "JOB" + step.bulk.query.return_value = "BATCH" + step.bulk.get_query_batch_result_ids.return_value = ["RESULT"] + + # Mock the downloaded CSV content with a single record + select_results = io.StringIO( + """[{"Id":"003000000000001", "Name":"Jawad", "Email":"mjawadtp@example.com"}]""" + ) + insert_results = io.StringIO( + "Id,Success,Created\n003000000000002,true,true\n003000000000003,true,true\n" + ) + download_mock.side_effect = [select_results, insert_results] + + # Mock the _wait_for_job method to simulate a successful job + step._wait_for_job = mock.Mock() + step._wait_for_job.return_value = DataOperationJobResult( + DataOperationStatus.SUCCESS, [], 0, 0 + ) + + # Prepare input records + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom", "cruise@example.com"], + ] + ) + + # Mock sub-operation for BulkApiDmlOperation + insert_step = 
mock.Mock(spec=BulkApiDmlOperation) + insert_step.start = mock.Mock() + insert_step.load_records = mock.Mock() + insert_step.end = mock.Mock() + insert_step.batch_ids = ["BATCH1"] + insert_step.bulk = mock.Mock() + insert_step.bulk.endpoint = "https://test" + insert_step.job_id = "JOB" + + with mock.patch( + "cumulusci.tasks.bulkdata.step.BulkApiDmlOperation", + return_value=insert_step, + ): + # Execute the select_records operation + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000003", success=True, error="", created=True + ) + ) + == 1 + ) + @mock.patch("cumulusci.tasks.bulkdata.step.download_file") def test_select_records_similarity_strategy__insert_records__no_select_records( self, download_mock @@ -2807,7 +2905,9 @@ def test_process_insert_records_failure(self): mock_rest_api_dml_operation.end.assert_not_called() @responses.activate - def test_select_records_similarity_strategy__insert_records(self): + def test_select_records_similarity_strategy__insert_records__non_zero_threshold( + self, + ): mock_describe_calls() task = _make_task( LoadData, @@ -2891,6 +2991,91 @@ def test_select_records_similarity_strategy__insert_records(self): == 1 ) + @responses.activate + def test_select_records_similarity_strategy__insert_records__zero_threshold(self): + mock_describe_calls() + task = _make_task( + LoadData, + { + "options": { + "database_url": "sqlite:///test.db", + "mapping": "mapping.yml", + } + }, + ) + task.project_config.project__package__api_version = CURRENT_SF_API_VERSION + task._init_task() + + # Create step with threshold + step = RestApiDmlOperation( + sobject="Contact", + operation=DataOperationType.UPSERT, + api_options={"batch_size": 10}, + context=task, + fields=["Name", "Email"], + selection_strategy=SelectStrategy.SIMILARITY, + threshold=0, + ) + + results_select_call = { + "records": [ + { + "Id": "003000000000001", + "Name": "Jawad", + "Email": "mjawadtp@example.com", + }, + ], + "done": True, + } + + results_insert_call = [ + {"id": "003000000000002", "success": True, "created": True}, + {"id": "003000000000003", "success": True, "created": True}, + ] + + step.sf.restful = mock.Mock( + side_effect=[results_select_call, results_insert_call] + ) + records = iter( + [ + ["Jawad", "mjawadtp@example.com"], + ["Aditya", "aditya@example.com"], + ["Tom Cruise", "tom@example.com"], + ] + ) + step.start() + step.select_records(records) + step.end() + + # Get the results and assert their properties + results = list(step.get_results()) + assert len(results) == 3 # Expect 3 results (matching the input records count) + # Assert that all results have the expected ID, success, and created values + assert ( + results.count( + DataOperationResult( + id="003000000000001", success=True, error="", created=False + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + id="003000000000002", success=True, error="", created=True + ) + ) + == 1 + ) + assert ( + results.count( + DataOperationResult( + 
id="003000000000003", success=True, error="", created=True + ) + ) + == 1 + ) + @responses.activate def test_insert_dml_operation__boolean_conversion(self): mock_describe_calls() From 865ad4936bd57b8dfc436a30375328a847d19147 Mon Sep 17 00:00:00 2001 From: lakshmi2506 <141401869+lakshmi2506@users.noreply.github.com> Date: Thu, 12 Dec 2024 00:38:55 +0530 Subject: [PATCH 55/65] @W-17366392: Fix Omnistudio issues for the sf command format (#3855) --- cumulusci/tasks/vlocity/vlocity.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cumulusci/tasks/vlocity/vlocity.py b/cumulusci/tasks/vlocity/vlocity.py index c1d0f68085..7c772b97d1 100644 --- a/cumulusci/tasks/vlocity/vlocity.py +++ b/cumulusci/tasks/vlocity/vlocity.py @@ -27,7 +27,7 @@ LWC_RSS_NAME = "OmniStudioLightning" OMNI_NAMESPACE = "omnistudio" VBT_SF_ALIAS = "cci-vbt-target" -SF_TOKEN_ENV = "SFDX_ACCESS_TOKEN" +SF_TOKEN_ENV = "SF_ACCESS_TOKEN" VBT_TOKEN_ENV = "OMNIOUT_TOKEN" @@ -106,7 +106,9 @@ def _add_token_to_sfdx(self, access_token: str, instance_url: str) -> str: # TODO: Use the sf v2 form of this command instead (when we migrate) token_store_cmd = [ "sf", - "org login access-token", + "org", + "login", + "access-token", "--no-prompt", "--alias", f"{VBT_SF_ALIAS}", From be5c0bb3f92bc45c1eb54dfc6f1bd04b712328a9 Mon Sep 17 00:00:00 2001 From: James Estevez Date: Wed, 11 Dec 2024 22:28:37 -0800 Subject: [PATCH 56/65] fix: restore task and flow reference docs (#3856) fixes #3848 --- .readthedocs.yml | 4 +++- docs/reference.md | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 4f0f038758..fa07e9b997 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -14,7 +14,9 @@ build: - asdf plugin add uv - asdf install uv latest - asdf global uv latest - - uv sync --only-group docs --frozen + - uv sync --group docs --frozen + - uv run cci task doc --write + - uv run cci flow doc > docs/flows.rst - uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html # Build documentation in the docs/ directory with Sphinx diff --git a/docs/reference.md b/docs/reference.md index be688a4590..892f81dcb2 100644 --- a/docs/reference.md +++ b/docs/reference.md @@ -6,7 +6,7 @@ maxdepth: 1 --- cheat-sheet -tasks +cumulusci_tasks flows env_var_reference ``` From 2a704dff61bc7832cc7af26f2539c39c3fc4a5a9 Mon Sep 17 00:00:00 2001 From: aditya-balachander Date: Thu, 12 Dec 2024 23:36:56 +0530 Subject: [PATCH 57/65] Fix for no records inserted when no records in target and threshold 0 --- cumulusci/tasks/bulkdata/select_utils.py | 2 +- .../tasks/bulkdata/tests/test_select_utils.py | 27 +++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index fedc1398bb..b15389402b 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -292,7 +292,7 @@ def similarity_post_process( ]: """Processes the query results for the similarity selection strategy""" # Handle case where query returns 0 records - if not query_records and not threshold: + if not query_records and threshold is None: error_message = f"No records found for {sobject} in the target org." 
return [], [], error_message diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index 6460f18bdc..447cdccef6 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -403,6 +403,33 @@ def test_similarity_post_process_with_no_records(): assert error_message == f"No records found for {sobject} in the target org." +def test_similarity_post_process_with_no_records__zero_threshold(): + select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY) + load_records = [["Aditya", "Salesforce"], ["Jawad", "Salesforce"]] + query_records = [] + num_records = 2 + sobject = "Lead" + ( + selected_records, + insert_records, + error_message, + ) = select_operator.select_post_process( + load_records=load_records, + query_records=query_records, + num_records=num_records, + sobject=sobject, + weights=[1, 1, 1], + fields=["LastName", "Company"], + threshold=0, + ) + + # Assert that it inserts everything + assert selected_records == [None, None] + assert insert_records[0] == ["Aditya", "Salesforce"] + assert insert_records[1] == ["Jawad", "Salesforce"] + assert error_message is None + + def test_calculate_levenshtein_distance_basic(): record1 = ["hello", "world"] record2 = ["hullo", "word"] From 534210c11037cf36b810fa59b69a6d9fc98e9915 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 13:23:37 -0800 Subject: [PATCH 58/65] Release v4.0.1.dev0 (#3860) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: James Estevez --- cumulusci/__about__.py | 2 +- docs/history.md | 123 +++++++++++++++++++++++------------------ 2 files changed, 71 insertions(+), 54 deletions(-) diff --git a/cumulusci/__about__.py b/cumulusci/__about__.py index 76ad18b89a..0f6c94fb0b 100644 --- a/cumulusci/__about__.py +++ b/cumulusci/__about__.py @@ -1 +1 @@ -__version__ = "4.0.1" +__version__ = "4.0.1.dev0" diff --git a/docs/history.md b/docs/history.md index 5ff75547d1..064fcf740c 100644 --- a/docs/history.md +++ b/docs/history.md @@ -2,6 +2,25 @@ +## v4.0.1.dev0 (2024-12-16) + + + +## What's Changed + +### Changes 🎉 + +- @W-16485311: Core Logic for Selecting Records from Target Org by [@aditya-balachander](https://github.com/aditya-balachander) in [#3818](https://github.com/SFDO-Tooling/CumulusCI/pull/3818) +- Add integration tests for all selection strategies by [@mjawadtp](https://github.com/mjawadtp) in [#3851](https://github.com/SFDO-Tooling/CumulusCI/pull/3851) +- @W-17357226: Fix for issue where zero threshold defaulted to select by [@aditya-balachander](https://github.com/aditya-balachander) in [#3853](https://github.com/SFDO-Tooling/CumulusCI/pull/3853) +- @W-17366392: Fix Omnistudio issues for the sf command format by [@lakshmi2506](https://github.com/lakshmi2506) in [#3855](https://github.com/SFDO-Tooling/CumulusCI/pull/3855) +- fix: restore task and flow reference docs by [@jstvz](https://github.com/jstvz) in [#3856](https://github.com/SFDO-Tooling/CumulusCI/pull/3856) +- @W-17412267: Fix for records not being inserted when threshold 0 by [@aditya-balachander](https://github.com/aditya-balachander) in [#3857](https://github.com/SFDO-Tooling/CumulusCI/pull/3857) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1...v4.0.1.dev0 + + + ## v4.0.1 (2024-11-18) ### Issues Fixed 🩴 @@ -10,8 +29,6 @@ **Full Changelog**: 
https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.0...v4.0.1 - - ## v4.0.0 (2024-11-12) ## What's Changed @@ -1694,9 +1711,9 @@ Critical Changes: subfolders will see a change in resolution behavior. Previously, a dependency specified like this: - dependencies: - - github: https://github.com/SalesforceFoundation/NPSP - subfolder: unpackaged/config/trial + dependencies: + - github: https://github.com/SalesforceFoundation/NPSP + subfolder: unpackaged/config/trial would always deploy from the latest commit on the default branch. Now, this dependency will be resolved to a GitHub commit @@ -1707,12 +1724,12 @@ Critical Changes: - The `project__dependencies` section in `cumulusci.yml` no longer supports nested dependencies specified like this: - dependencies: - - namespace: "test" - version: "1.0" dependencies: - - namespace: "parent" - version: "2.2" + - namespace: "test" + version: "1.0" + dependencies: + - namespace: "parent" + version: "2.2" All dependencies should be listed in install order. @@ -1881,12 +1898,12 @@ Critical changes: - The `project__dependencies` section in `cumulusci.yml` will no longer support nested dependencies specified like this : - dependencies: - - namespace: "test" - version: "1.0" - dependencies: - - namespace: "parent" - version: "2.2" + dependencies: + - namespace: "test" + version: "1.0" + dependencies: + - namespace: "parent" + version: "2.2" All dependencies should be listed in install order. @@ -3493,33 +3510,33 @@ New features: : - - Added keywords for generating a collection of sObjects according to a template: + Added keywords for generating a collection of sObjects according to a template: - : - `Generate Test Data` - - `Salesforce Collection Insert` - - `Salesforce Collection Update` + : - `Generate Test Data` + - `Salesforce Collection Insert` + - `Salesforce Collection Update` - - + - - Changes to Page Objects: + Changes to Page Objects: - : - More than one page object can be loaded at once. - Once loaded, the keywords of a page object remain - visible in the suite. Robot will give priority to - keywords in the reverse order in which they were - imported. - - There is a new keyword, `Log Current Page Object`, - which can be useful to see information about the - most recently loaded page object. - - There is a new keyword, `Get Page Object`, which - will return the robot library for a given page - object. This can be used in other keywords to access - keywords from another page object if necessary. - - The `Go To Page` keyword will now automatically load - the page object for the given page. + : - More than one page object can be loaded at once. + Once loaded, the keywords of a page object remain + visible in the suite. Robot will give priority to + keywords in the reverse order in which they were + imported. + - There is a new keyword, `Log Current Page Object`, + which can be useful to see information about the + most recently loaded page object. + - There is a new keyword, `Get Page Object`, which + will return the robot library for a given page + object. This can be used in other keywords to access + keywords from another page object if necessary. + - The `Go To Page` keyword will now automatically load + the page object for the given page. - - Added a basic debugger for Robot tests. It can be enabled - using the `-o debug True` option to the robot task. + - Added a basic debugger for Robot tests. It can be enabled + using the `-o debug True` option to the robot task. 
- Added support for deploying new metadata types
    `ProfilePasswordPolicy` and `ProfileSessionSetting`.
@@ -3594,8 +3611,8 @@ New features:
     permanently set this option, add this in
     `~/.cumulusci/cumulusci.yml`:

-        cli:
-            plain_output: True
+          cli:
+              plain_output: True

 -   Added additional info to the `cci version` command, including the
     Python version, an upgrade check, and a warning on Python 2.
@@ -4876,12 +4893,12 @@ Resolving a few issues from beta77:
     below. In flows that need to inject the actual namespace prefix,
     override the [unmanaged]{.title-ref} option .. :

-        custom_deploy_task:
-            class_path: cumulusci.tasks.salesforce.Deploy
-            options:
-                path: your/custom/metadata
-                namespace_inject: $project_config.project__package__namespace
-                unmanaged: False
+          custom_deploy_task:
+              class_path: cumulusci.tasks.salesforce.Deploy
+              options:
+                  path: your/custom/metadata
+                  namespace_inject: $project_config.project__package__namespace
+                  unmanaged: False

 ### Enhancements

@@ -5596,13 +5613,13 @@ Resolving a few issues from beta77:
 -   **IMPORTANT** This release changes the yaml structure for flows.
     The new structure now looks like this:

-        flows:
-            flow_name:
-                tasks:
-                    1:
-                        task: deploy
-                    2:
-                        task: run_tests
+          flows:
+              flow_name:
+                  tasks:
+                      1:
+                          task: deploy
+                      2:
+                          task: run_tests

 -   See the new flow customization examples in the cookbook for
     examples of why this change was made and how to use it:

From 89a5b5ddb9b25bf22a199ab8ab93eb79c57f2afe Mon Sep 17 00:00:00 2001
From: aditya-balachander
Date: Thu, 19 Dec 2024 09:57:09 +0530
Subject: [PATCH 59/65] @W-17427085: Set ANNOY related dependencies to be
 optional (#3858)

Changes:
- Removed `"annoy", "numpy", "pandas", "scikit-learn"` from the dependencies
  in `pyproject.toml` and added them as optional dependencies
- Created the flag `OPTIONAL_DEPENDENCIES_AVAILABLE` in `select_utils.py` to
  indicate whether the ANNOY-related dependencies are present. If these
  optional dependencies are not available, Levenshtein-distance-based
  selection is still applied, even for high volumes of records
  (i.e. `complexity_constant >= 1000`).
- Skipped the pytests in `test_select_utils.py` that depend on `pandas` and
  the ANNOY-related optional dependencies
- Added a warning message about non-zero similarity scores when using ANNOY
  (for high volumes of records). 
Updated the docs as well - Added additional workflow to run all unit tests with all optional dependencies installed --- .github/workflows/feature_test.yml | 24 +++++++++ cumulusci/tasks/bulkdata/select_utils.py | 33 +++++++++--- .../tasks/bulkdata/tests/test_select_utils.py | 50 ++++++++++++++++++- docs/data.md | 3 ++ pyproject.toml | 12 +++-- 5 files changed, 111 insertions(+), 11 deletions(-) diff --git a/.github/workflows/feature_test.yml b/.github/workflows/feature_test.yml index 9433041f85..001f4b9faf 100644 --- a/.github/workflows/feature_test.yml +++ b/.github/workflows/feature_test.yml @@ -63,6 +63,30 @@ jobs: - name: Run Pytest run: uv run pytest --cov-report= --cov=cumulusci + unit_tests_opt_deps: + name: "Unit tests with optional dependencies: ${{ matrix.os }}-${{ matrix.python-version }}" + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [macos-latest, SFDO-Tooling-Ubuntu, SFDO-Tooling-Windows] + python-version: ["3.11", "3.12", "3.13"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "${{ matrix.python-version }}" + - name: Set up uv + uses: SFDO-Tooling/setup-uv@main + with: + version: "0.5.0" + enable-cache: true + - name: Install dependencies + run: uv sync --all-extras -p ${{ matrix.python-version }} + - name: Run Pytest + run: uv run pytest --cov-report= --cov=cumulusci + robot_api: name: "Robot: No browser" runs-on: SFDO-Tooling-Ubuntu diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py index b15389402b..b37aa457ad 100644 --- a/cumulusci/tasks/bulkdata/select_utils.py +++ b/cumulusci/tasks/bulkdata/select_utils.py @@ -1,22 +1,37 @@ +import logging import random import re import typing as T from enum import Enum -import numpy as np -import pandas as pd -from annoy import AnnoyIndex from pydantic import Field, root_validator, validator -from sklearn.feature_extraction.text import HashingVectorizer -from sklearn.preprocessing import StandardScaler from cumulusci.core.enums import StrEnum from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import ( DEFAULT_DECLARATIONS, ) from cumulusci.tasks.bulkdata.utils import CaseInsensitiveDict +from cumulusci.utils import get_cci_upgrade_command from cumulusci.utils.yaml.model_parser import CCIDictModel +logger = logging.getLogger(__name__) +try: + import numpy as np + import pandas as pd + from annoy import AnnoyIndex + from sklearn.feature_extraction.text import HashingVectorizer + from sklearn.preprocessing import StandardScaler + + OPTIONAL_DEPENDENCIES_AVAILABLE = True +except ImportError: + logger.warning( + f"Optional dependencies are missing. " + "Handling high volumes of records for the 'select' functionality will be significantly slower, " + "as optimizations for this feature are currently disabled. 
" + f"To enable optimized performance, install all required dependencies using: {get_cci_upgrade_command()}[select]\n" + ) + OPTIONAL_DEPENDENCIES_AVAILABLE = False + class SelectStrategy(StrEnum): """Enum defining the different selection strategies requested.""" @@ -308,7 +323,7 @@ def similarity_post_process( select_records = [] insert_records = [] - if complexity_constant < 1000: + if complexity_constant < 1000 or not OPTIONAL_DEPENDENCIES_AVAILABLE: select_records, insert_records = levenshtein_post_process( load_records, query_records, fields, weights, threshold ) @@ -328,6 +343,12 @@ def annoy_post_process( threshold: T.Union[float, None], ) -> T.Tuple[T.List[dict], list]: """Processes the query results for the similarity selection strategy using Annoy algorithm for large number of records""" + # Add warning when threshold is 0 + if threshold is not None and threshold == 0: + logger.warning( + "Warning: A threshold of 0 may miss exact matches in high volumes. Use a small value like 0.1 for better accuracy." + ) + selected_records = [] insertion_candidates = [] diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py index 447cdccef6..589f66806a 100644 --- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py +++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py @@ -1,7 +1,7 @@ -import pandas as pd import pytest from cumulusci.tasks.bulkdata.select_utils import ( + OPTIONAL_DEPENDENCIES_AVAILABLE, SelectOperationExecutor, SelectStrategy, add_limit_offset_to_user_filter, @@ -15,6 +15,14 @@ vectorize_records, ) +# Check for pandas availability +try: + import pandas as pd + + PANDAS_AVAILABLE = True +except ImportError: + PANDAS_AVAILABLE = False + # Test Cases for standard_generate_query def test_standard_generate_query_with_default_record_declaration(): @@ -511,6 +519,10 @@ def test_calculate_levenshtein_distance_weights_length_doesnt_match(): assert "Records must be same size as fields (weights)." 
in str(e.value) +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_all_numeric_columns(): df_db = pd.DataFrame({"A": ["1", "2", "3"], "B": ["4.5", " 5.5", "6.5"]}) df_query = pd.DataFrame({"A": ["4", "5", ""], "B": ["4.5", "5.5", "6.5"]}) @@ -526,6 +538,10 @@ def test_all_numeric_columns(): assert determine_field_types(df_db, df_query, weights) == expected_output +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_numeric_columns__one_non_numeric(): df_db = pd.DataFrame({"A": ["1", "2", "3"], "B": ["4.5", "5.5", "6.5"]}) df_query = pd.DataFrame({"A": ["4", "5", "6"], "B": ["abcd", "5.5", "6.5"]}) @@ -541,6 +557,10 @@ def test_numeric_columns__one_non_numeric(): assert determine_field_types(df_db, df_query, weights) == expected_output +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_all_boolean_columns(): df_db = pd.DataFrame( {"A": ["true", "false", "true"], "B": ["false", "true", "false"]} @@ -560,6 +580,10 @@ def test_all_boolean_columns(): assert determine_field_types(df_db, df_query, weights) == expected_output +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_all_categorical_columns(): df_db = pd.DataFrame( {"A": ["apple", "banana", "cherry"], "B": ["dog", "cat", "mouse"]} @@ -579,6 +603,10 @@ def test_all_categorical_columns(): assert determine_field_types(df_db, df_query, weights) == expected_output +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_mixed_types(): df_db = pd.DataFrame( { @@ -606,6 +634,10 @@ def test_mixed_types(): assert determine_field_types(df_db, df_query, weights) == expected_output +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_vectorize_records_mixed_numerical_boolean_categorical(): # Test data with mixed types: numerical and categorical only db_records = [["1.0", "true", "apple"], ["2.0", "false", "banana"]] @@ -633,6 +665,10 @@ def test_vectorize_records_mixed_numerical_boolean_categorical(): ), "Query vectors column count mismatch" +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_annoy_post_process(): # Test data load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]] @@ -659,6 +695,10 @@ def test_annoy_post_process(): assert not insert_records +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_annoy_post_process__insert_records(): # Test data load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]] @@ -714,6 +754,10 @@ def test_annoy_post_process__no_query_records(): ] # The first insert record should match the second load record +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_annoy_post_process__insert_records_with_polymorphic_fields(): # Test data load_records = [ @@ -749,6 +793,10 @@ def test_annoy_post_process__insert_records_with_polymorphic_fields(): ] # The 
first insert record should match the second load record +@pytest.mark.skipif( + not PANDAS_AVAILABLE or not OPTIONAL_DEPENDENCIES_AVAILABLE, + reason="requires optional dependencies for annoy", +) def test_single_record_match_annoy_post_process(): # Mock data where only the first query record matches the first load record load_records = [["Alice", "Engineer"], ["Bob", "Doctor"]] diff --git a/docs/data.md b/docs/data.md index fe9396a4ae..ba61076315 100644 --- a/docs/data.md +++ b/docs/data.md @@ -352,6 +352,9 @@ This parameter is **optional**; if not specified, no threshold will be applied a This feature is particularly useful during version upgrades, where records that closely match can be selected, while those that do not match sufficiently can be inserted into the target org. +**Important Note:** +For high volumes of records, an approximation algorithm is applied to improve performance. In such cases, setting a threshold of `0` may not guarantee the selection of exact matches, as the algorithm can assign a small non-zero similarity score to exact matches. To ensure accurate selection, it is recommended to set the threshold to a small value slightly greater than `0`, such as `0.1`. This ensures both precision and efficiency in the selection process. + --- #### Example diff --git a/pyproject.toml b/pyproject.toml index 7dec9eedab..d840b1eb9e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,6 @@ classifiers = [ "Programming Language :: Python :: 3.13", ] dependencies = [ - "annoy", "click>=8.1", "cryptography", "python-dateutil", @@ -35,8 +34,6 @@ dependencies = [ "defusedxml", "lxml", "MarkupSafe", - "numpy", - "pandas", "psutil", "pydantic<2", "PyJWT", @@ -53,7 +50,6 @@ dependencies = [ "rst2ansi>=0.1.5", "salesforce-bulk", "sarge", - "scikit-learn", "selenium<4", "simple-salesforce==1.11.4", "snowfakery>=4.0.0", @@ -88,6 +84,14 @@ lint = [ "pre-commit>=3.5.0", ] +[project.optional-dependencies] +select = [ + "annoy", + "numpy", + "pandas", + "scikit-learn", +] + [project.scripts] cci = "cumulusci.cli.cci:main" snowfakery = "snowfakery.cli:main" From 5b9d7b344ae6d7989f8f702a9257604899c9ca28 Mon Sep 17 00:00:00 2001 From: lakshmi2506 <141401869+lakshmi2506@users.noreply.github.com> Date: Thu, 19 Dec 2024 22:40:37 +0530 Subject: [PATCH 60/65] Update the documentation for SFDX_ORG_CREATE_ARGS environment variable (#3861) Co-authored-by: Bharath Chadarajupalli --- docs/env-var-reference.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/env-var-reference.md b/docs/env-var-reference.md index cc01c8a798..4d6e3fa738 100644 --- a/docs/env-var-reference.md +++ b/docs/env-var-reference.md @@ -70,3 +70,7 @@ org, e.g. a Dev Hub. Set with SFDX_CLIENT_ID. ## `SFDX_ORG_CREATE_ARGS` Extra arguments passed to `sf org create scratch`. + +To provide additional arguments, use the following format. 
For instance, to set the release to "preview", set the environment variable to "--release=preview".
+
+To specify multiple options, include them together in a single value, for example: "--edition=developer --release=preview".

From 0fca7235e31e6e8a146f794d20baba21c365f634 Mon Sep 17 00:00:00 2001
From: Jawadtp
Date: Fri, 3 Jan 2025 15:00:48 +0530
Subject: [PATCH 61/65] Remove default declaration for select rows query

---
 cumulusci/tasks/bulkdata/select_utils.py | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/select_utils.py b/cumulusci/tasks/bulkdata/select_utils.py
index b37aa457ad..7835d8dea8 100644
--- a/cumulusci/tasks/bulkdata/select_utils.py
+++ b/cumulusci/tasks/bulkdata/select_utils.py
@@ -7,9 +7,6 @@
 from pydantic import Field, root_validator, validator

 from cumulusci.core.enums import StrEnum
-from cumulusci.tasks.bulkdata.extract_dataset_utils.hardcoded_default_declarations import (
-    DEFAULT_DECLARATIONS,
-)
 from cumulusci.tasks.bulkdata.utils import CaseInsensitiveDict
 from cumulusci.utils import get_cci_upgrade_command
 from cumulusci.utils.yaml.model_parser import CCIDictModel
@@ -188,10 +185,6 @@ def standard_generate_query(
             filter_clause=user_filter, limit_clause=limit, offset_clause=offset
         )
     else:
-        # Get the WHERE clause from DEFAULT_DECLARATIONS if available
-        declaration = DEFAULT_DECLARATIONS.get(sobject)
-        if declaration:
-            query += f" WHERE {declaration.where}"
         query += f" LIMIT {limit}" if limit else ""
         query += f" OFFSET {offset}" if offset else ""
         return query, ["Id"]
@@ -281,10 +274,6 @@ def similarity_generate_query(
             filter_clause=user_filter, limit_clause=limit, offset_clause=offset
         )
     else:
-        # Get the WHERE clause from DEFAULT_DECLARATIONS if available
-        declaration = DEFAULT_DECLARATIONS.get(sobject)
-        if declaration:
-            query += f" WHERE {declaration.where}"
         query += f" LIMIT {limit}" if limit else ""
         query += f" OFFSET {offset}" if offset else ""

From ed82f07cf0abc12745462fa8026d13bc38ad3619 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Mon, 6 Jan 2025 10:03:59 +0530
Subject: [PATCH 62/65] Release v4.0.1.dev1 (#3866)

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 cumulusci/__about__.py |   2 +-
 docs/history.md        | 121 +++++++++++++++++++++++------------------
 2 files changed, 68 insertions(+), 55 deletions(-)

diff --git a/cumulusci/__about__.py b/cumulusci/__about__.py
index 0f6c94fb0b..486892be53 100644
--- a/cumulusci/__about__.py
+++ b/cumulusci/__about__.py
@@ -1 +1 @@
-__version__ = "4.0.1.dev0"
+__version__ = "4.0.1.dev1"
diff --git a/docs/history.md b/docs/history.md
index 064fcf740c..ab2bb0a133 100644
--- a/docs/history.md
+++ b/docs/history.md
@@ -2,6 +2,21 @@



+## v4.0.1.dev1 (2025-01-01)
+
+
+
+## What's Changed
+
+### Changes 🎉
+
+- @W-17427085: Set ANNOY related dependencies to be optional by [@aditya-balachander](https://github.com/aditya-balachander) in [#3858](https://github.com/SFDO-Tooling/CumulusCI/pull/3858)
+- Update on the documentation for SFDX_ORG_CREATE_ARGS environment variable by [@lakshmi2506](https://github.com/lakshmi2506) in [#3861](https://github.com/SFDO-Tooling/CumulusCI/pull/3861)
+
+**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1.dev0...v4.0.1.dev1
+
+
+
 ## v4.0.1.dev0 (2024-12-16)



@@ -19,8 +34,6 @@

 **Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1...v4.0.1.dev0

-
-
 ## v4.0.1 (2024-11-18)

 ### Issues Fixed 🩴

@@ -1711,9 +1724,9 @@ 
Critical Changes: subfolders will see a change in resolution behavior. Previously, a dependency specified like this: - dependencies: - - github: https://github.com/SalesforceFoundation/NPSP - subfolder: unpackaged/config/trial + dependencies: + - github: https://github.com/SalesforceFoundation/NPSP + subfolder: unpackaged/config/trial would always deploy from the latest commit on the default branch. Now, this dependency will be resolved to a GitHub commit @@ -1724,12 +1737,12 @@ Critical Changes: - The `project__dependencies` section in `cumulusci.yml` no longer supports nested dependencies specified like this: - dependencies: - - namespace: "test" - version: "1.0" - dependencies: - - namespace: "parent" - version: "2.2" + dependencies: + - namespace: "test" + version: "1.0" + dependencies: + - namespace: "parent" + version: "2.2" All dependencies should be listed in install order. @@ -1898,12 +1911,12 @@ Critical changes: - The `project__dependencies` section in `cumulusci.yml` will no longer support nested dependencies specified like this : - dependencies: - - namespace: "test" - version: "1.0" - dependencies: - - namespace: "parent" - version: "2.2" + dependencies: + - namespace: "test" + version: "1.0" + dependencies: + - namespace: "parent" + version: "2.2" All dependencies should be listed in install order. @@ -3510,33 +3523,33 @@ New features: : - - Added keywords for generating a collection of sObjects according to a template: + Added keywords for generating a collection of sObjects according to a template: - : - `Generate Test Data` - - `Salesforce Collection Insert` - - `Salesforce Collection Update` + : - `Generate Test Data` + - `Salesforce Collection Insert` + - `Salesforce Collection Update` - - + - - Changes to Page Objects: + Changes to Page Objects: - : - More than one page object can be loaded at once. - Once loaded, the keywords of a page object remain - visible in the suite. Robot will give priority to - keywords in the reverse order in which they were - imported. - - There is a new keyword, `Log Current Page Object`, - which can be useful to see information about the - most recently loaded page object. - - There is a new keyword, `Get Page Object`, which - will return the robot library for a given page - object. This can be used in other keywords to access - keywords from another page object if necessary. - - The `Go To Page` keyword will now automatically load - the page object for the given page. + : - More than one page object can be loaded at once. + Once loaded, the keywords of a page object remain + visible in the suite. Robot will give priority to + keywords in the reverse order in which they were + imported. + - There is a new keyword, `Log Current Page Object`, + which can be useful to see information about the + most recently loaded page object. + - There is a new keyword, `Get Page Object`, which + will return the robot library for a given page + object. This can be used in other keywords to access + keywords from another page object if necessary. + - The `Go To Page` keyword will now automatically load + the page object for the given page. - - Added a basic debugger for Robot tests. It can be enabled - using the `-o debug True` option to the robot task. + - Added a basic debugger for Robot tests. It can be enabled + using the `-o debug True` option to the robot task. - Added support for deploying new metadata types `ProfilePasswordPolicy` and `ProfileSessionSetting`. 
@@ -3611,8 +3624,8 @@ New features:
     permanently set this option, add this in
     `~/.cumulusci/cumulusci.yml`:

-        cli:
-            plain_output: True
+          cli:
+              plain_output: True

 -   Added additional info to the `cci version` command, including the
     Python version, an upgrade check, and a warning on Python 2.
@@ -4893,12 +4906,12 @@ Resolving a few issues from beta77:
     below. In flows that need to inject the actual namespace prefix,
     override the [unmanaged]{.title-ref} option .. :

-        custom_deploy_task:
-            class_path: cumulusci.tasks.salesforce.Deploy
-            options:
-                path: your/custom/metadata
-                namespace_inject: $project_config.project__package__namespace
-                unmanaged: False
+          custom_deploy_task:
+              class_path: cumulusci.tasks.salesforce.Deploy
+              options:
+                  path: your/custom/metadata
+                  namespace_inject: $project_config.project__package__namespace
+                  unmanaged: False

 ### Enhancements

@@ -5613,13 +5626,13 @@ Resolving a few issues from beta77:
 -   **IMPORTANT** This release changes the yaml structure for flows.
     The new structure now looks like this:

-        flows:
-            flow_name:
-                tasks:
-                    1:
-                        task: deploy
-                    2:
-                        task: run_tests
+          flows:
+              flow_name:
+                  tasks:
+                      1:
+                          task: deploy
+                      2:
+                          task: run_tests

 -   See the new flow customization examples in the cookbook for
     examples of why this change was made and how to use it:

From 2d14a07c3582b45ce6890a33bea81770e88a4257 Mon Sep 17 00:00:00 2001
From: Jawadtp
Date: Mon, 6 Jan 2025 11:50:02 +0530
Subject: [PATCH 63/65] Fix test failures

---
 .../tasks/bulkdata/tests/test_select_utils.py | 57 +------------------
 1 file changed, 3 insertions(+), 54 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/tests/test_select_utils.py b/cumulusci/tasks/bulkdata/tests/test_select_utils.py
index 589f66806a..dbd2a993ca 100644
--- a/cumulusci/tasks/bulkdata/tests/test_select_utils.py
+++ b/cumulusci/tasks/bulkdata/tests/test_select_utils.py
@@ -24,23 +24,7 @@
     PANDAS_AVAILABLE = False


-# Test Cases for standard_generate_query
-def test_standard_generate_query_with_default_record_declaration():
-    select_operator = SelectOperationExecutor(SelectStrategy.STANDARD)
-    sobject = "Account"  # Assuming Account has a declaration in DEFAULT_DECLARATIONS
-    limit = 5
-    offset = 2
-    query, fields = select_operator.select_generate_query(
-        sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset
-    )
-
-    assert "WHERE" in query  # Ensure WHERE clause is included
-    assert f"LIMIT {limit}" in query
-    assert f"OFFSET {offset}" in query
-    assert fields == ["Id"]
-
-
-def test_standard_generate_query_without_default_record_declaration():
+def test_standard_generate_query_without_filter():
     select_operator = SelectOperationExecutor(SelectStrategy.STANDARD)
     sobject = "Contact"  # Assuming no declaration for this object
     limit = 3
@@ -49,7 +33,6 @@ def test_standard_generate_query_without_filter():
         sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset
     )

-    assert "WHERE" not in query  # No WHERE clause should be present
     assert f"LIMIT {limit}" in query
     assert "OFFSET" not in query
     assert fields == ["Id"]
@@ -72,23 +55,7 @@ def test_standard_generate_query_with_user_filter():
     assert fields == ["Id"]


-# Test Cases for random generate query
-def test_random_generate_query_with_default_record_declaration():
-    select_operator = SelectOperationExecutor(SelectStrategy.RANDOM)
-    sobject = "Account"  # Assuming Account has a declaration in DEFAULT_DECLARATIONS
-    limit = 5
-    offset = 2
-    query, fields = select_operator.select_generate_query(
-        sobject=sobject, fields=[], user_filter="", 
limit=limit, offset=offset
-    )
-
-    assert "WHERE" in query  # Ensure WHERE clause is included
-    assert f"LIMIT {limit}" in query
-    assert f"OFFSET {offset}" in query
-    assert fields == ["Id"]
-
-
-def test_random_generate_query_without_default_record_declaration():
+def test_random_generate_query():
     select_operator = SelectOperationExecutor(SelectStrategy.RANDOM)
     sobject = "Contact"  # Assuming no declaration for this object
     limit = 3
@@ -97,7 +64,6 @@ def test_random_generate_query():
         sobject=sobject, fields=[], user_filter="", limit=limit, offset=offset
     )

-    assert "WHERE" not in query  # No WHERE clause should be present
     assert f"LIMIT {limit}" in query
     assert "OFFSET" not in query
     assert fields == ["Id"]
@@ -209,23 +175,7 @@ def test_random_post_process_with_no_records():
     assert error_message == f"No records found for {sobject} in the target org."


-# Test Cases for Similarity Generate Query
-def test_similarity_generate_query_with_default_record_declaration():
-    select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
-    sobject = "Account"  # Assuming Account has a declaration in DEFAULT_DECLARATIONS
-    limit = 5
-    offset = 2
-    query, fields = select_operator.select_generate_query(
-        sobject, ["Name"], [], limit, offset
-    )
-
-    assert "WHERE" in query  # Ensure WHERE clause is included
-    assert fields == ["Id", "Name"]
-    assert f"LIMIT {limit}" in query
-    assert f"OFFSET {offset}" in query
-
-
-def test_similarity_generate_query_without_default_record_declaration():
+def test_similarity_generate_query_no_nesting():
     select_operator = SelectOperationExecutor(SelectStrategy.SIMILARITY)
     sobject = "Contact"  # Assuming no declaration for this object
     limit = 3
@@ -234,7 +184,6 @@ def test_similarity_generate_query_no_nesting():
         sobject, ["Name"], [], limit, offset
     )

-    assert "WHERE" not in query  # No WHERE clause should be present
     assert fields == ["Id", "Name"]
     assert f"LIMIT {limit}" in query
     assert "OFFSET" not in query

From 409f4ee33be5836504347708eff4d6142b6e0f4d Mon Sep 17 00:00:00 2001
From: Jawadtp
Date: Mon, 6 Jan 2025 12:28:32 +0530
Subject: [PATCH 64/65] Fix test failure due to incorrect URL in cassettes

---
 .../cassettes/TestSelect.test_select_random_strategy.yaml    | 4 ++--
 .../cassettes/TestSelect.test_select_similarity_strategy.yaml | 2 +-
 .../cassettes/TestSelect.test_select_standard_strategy.yaml  | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml
index 508be49cb4..1f49e2cec0 100644
--- a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml
+++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_random_strategy.yaml
@@ -48,7 +48,7 @@ interactions:

 - request:
     method: GET
-    uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements'
+    uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account
   body: null
   headers: *id004
   response:
@@ -125,7 +125,7 @@

 - request:
     method: GET
-    uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements'%20LIMIT%205 
+ uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id%20FROM%20Account%20LIMIT%205 body: null headers: *id004 response: diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml index 31897e7650..5053086cfb 100644 --- a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml +++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_similarity_strategy.yaml @@ -48,7 +48,7 @@ interactions: - request: method: GET - uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account body: null headers: *id004 response: diff --git a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml index 508be49cb4..1f49e2cec0 100644 --- a/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml +++ b/cumulusci/tasks/bulkdata/tests/cassettes/TestSelect.test_select_standard_strategy.yaml @@ -48,7 +48,7 @@ interactions: - request: method: GET - uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements' + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id,%20Name,%20Description,%20Phone,%20AccountNumber%20FROM%20Account body: null headers: *id004 response: @@ -125,7 +125,7 @@ interactions: - request: method: GET - uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id%20FROM%20Account%20WHERE%20Name%20!=%20'Sample%20Account%20for%20Entitlements'%20LIMIT%205 + uri: https://orgname.my.salesforce.com/services/data/v62.0/query/?q=SELECT%20Id%20FROM%20Account%20LIMIT%205 body: null headers: *id004 response: From 1225b76b6a236d3dda8a95b83a84ce513ec159f2 Mon Sep 17 00:00:00 2001 From: Bharath Chadarajupalli Date: Thu, 9 Jan 2025 14:41:37 -0800 Subject: [PATCH 65/65] Release v4.1.0 (#3868) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- cumulusci/__about__.py | 2 +- docs/history.md | 27 ++++++++++++++++++++++++--- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/cumulusci/__about__.py b/cumulusci/__about__.py index 486892be53..7039708762 100644 --- a/cumulusci/__about__.py +++ b/cumulusci/__about__.py @@ -1 +1 @@ -__version__ = "4.0.1.dev1" +__version__ = "4.1.0" diff --git a/docs/history.md b/docs/history.md index ab2bb0a133..69bdfa2d71 100644 --- a/docs/history.md +++ b/docs/history.md @@ -2,7 +2,7 @@ -## v4.0.1.dev1 (2025-01-01) +## v4.1.0 (2025-01-09) @@ -10,13 +10,34 @@ ### Changes 🎉 +- Remove default declaration for select rows query by [@mjawadtp](https://github.com/mjawadtp) in + [#3867](https://github.com/SFDO-Tooling/CumulusCI/pull/3867) - @W-17427085: Set ANNOY related dependencies to be optional by [@aditya-balachander](https://github.com/aditya-balachander) in [#3858](https://github.com/SFDO-Tooling/CumulusCI/pull/3858) - Update on the documentation for SFDX_ORG_CREATE_ARGS environment variable by 
[@lakshmi2506](https://github.com/lakshmi2506) in [#3861](https://github.com/SFDO-Tooling/CumulusCI/pull/3861) +- @W-16485311: Core Logic for Selecting Records from Target Org by [@aditya-balachander](https://github.com/aditya-balachander) in [#3818](https://github.com/SFDO-Tooling/CumulusCI/pull/3818) +- Add integration tests for all selection strategies by [@mjawadtp](https://github.com/mjawadtp) in [#3851](https://github.com/SFDO-Tooling/CumulusCI/pull/3851) +- @W-17357226: Fix for issue where zero threshold defaulted to select by [@aditya-balachander](https://github.com/aditya-balachander) in [#3853](https://github.com/SFDO-Tooling/CumulusCI/pull/3853) +- @W-17366392: Fix Omnistudio issues for the sf command format by [@lakshmi2506](https://github.com/lakshmi2506) in [#3855](https://github.com/SFDO-Tooling/CumulusCI/pull/3855) +- fix: restore task and flow reference docs by [@jstvz](https://github.com/jstvz) in [#3856](https://github.com/SFDO-Tooling/CumulusCI/pull/3856) +- @W-17412267: Fix for records not being inserted when threshold 0 by [@aditya-balachander](https://github.com/aditya-balachander) in [#3857](https://github.com/SFDO-Tooling/CumulusCI/pull/3857) -**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1.dev0...v4.0.1.dev1 +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1.dev1...v4.1.0 +## v4.1.0.dev1 (2025-01-01) + + + +## What's Changed + +### Changes 🎉 + +- @W-17427085: Set ANNOY related dependencies to be optional by [@aditya-balachander](https://github.com/aditya-balachander) in [#3858](https://github.com/SFDO-Tooling/CumulusCI/pull/3858) +- Update on the documentation for SFDX_ORG_CREATE_ARGS environment variable by [@lakshmi2506](https://github.com/lakshmi2506) in [#3861](https://github.com/SFDO-Tooling/CumulusCI/pull/3861) + +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1.dev0...v4.0.1.dev1 + ## v4.0.1.dev0 (2024-12-16) @@ -32,7 +53,7 @@ - fix: restore task and flow reference docs by [@jstvz](https://github.com/jstvz) in [#3856](https://github.com/SFDO-Tooling/CumulusCI/pull/3856) - @W-17412267: Fix for records not being inserted when threshold 0 by [@aditya-balachander](https://github.com/aditya-balachander) in [#3857](https://github.com/SFDO-Tooling/CumulusCI/pull/3857) -**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1...v4.0.1.dev0 +**Full Changelog**: https://github.com/SFDO-Tooling/CumulusCI/compare/v4.0.1...v4.1.0 ## v4.0.1 (2024-11-18)
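
To make the threshold guidance from the `docs/data.md` hunk above concrete, the mapping step below is a sketch only: the `select_options`, `strategy`, and `threshold` keys are assumptions inferred from the `SelectStrategy` enum and threshold handling in `select_utils.py` and from the docs excerpt, not syntax confirmed by these patches. The field list mirrors the Account queries recorded in the test cassettes. It illustrates the recommendation to use a small non-zero threshold (such as `0.1`) so that the approximate ANNOY path used at high record volumes does not drop exact matches:

    Account:
        sf_object: Account
        api: bulk
        action: select
        fields:
            - Name
            - Description
            - Phone
            - AccountNumber
        select_options:
            strategy: similarity
            # Assumed key names. A threshold of 0 can miss exact matches once
            # the approximate ANNOY path is taken for high record volumes, so
            # a small non-zero value such as 0.1 is the safer choice.
            threshold: 0.1

With a configuration along these lines, load records that score within the threshold of an existing org record are selected (their Ids are reused downstream), while records that match poorly are inserted into the target org — the behavior the zero-threshold fixes in this series are meant to guarantee.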