diff --git a/docs/.doctrees/environment.pickle b/docs/.doctrees/environment.pickle
index 3145d525..37112266 100644
Binary files a/docs/.doctrees/environment.pickle and b/docs/.doctrees/environment.pickle differ
diff --git a/docs/.doctrees/index.doctree b/docs/.doctrees/index.doctree
index 26766b49..e0295c7f 100644
Binary files a/docs/.doctrees/index.doctree and b/docs/.doctrees/index.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.api.doctree b/docs/.doctrees/skit_pipelines.api.doctree
index 071f7d59..4b081def 100644
Binary files a/docs/.doctrees/skit_pipelines.api.doctree and b/docs/.doctrees/skit_pipelines.api.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.doctree b/docs/.doctrees/skit_pipelines.components.doctree
index 572a1d79..1a172e82 100644
Binary files a/docs/.doctrees/skit_pipelines.components.doctree and b/docs/.doctrees/skit_pipelines.components.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.evaluate_slu_from_repo.doctree b/docs/.doctrees/skit_pipelines.components.evaluate_slu_from_repo.doctree
index 86dbbc8b..9f9b102f 100644
Binary files a/docs/.doctrees/skit_pipelines.components.evaluate_slu_from_repo.doctree and b/docs/.doctrees/skit_pipelines.components.evaluate_slu_from_repo.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.final_conversation_generator.doctree b/docs/.doctrees/skit_pipelines.components.final_conversation_generator.doctree
index 8d0cdbd0..b451b5d4 100644
Binary files a/docs/.doctrees/skit_pipelines.components.final_conversation_generator.doctree and b/docs/.doctrees/skit_pipelines.components.final_conversation_generator.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.invalidate_situations_in_db.doctree b/docs/.doctrees/skit_pipelines.components.invalidate_situations_in_db.doctree
new file mode 100644
index 00000000..7188a5e4
Binary files /dev/null and b/docs/.doctrees/skit_pipelines.components.invalidate_situations_in_db.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.retrain_slu_from_repo.doctree b/docs/.doctrees/skit_pipelines.components.retrain_slu_from_repo.doctree
index f8b5cd02..d58b40b3 100644
Binary files a/docs/.doctrees/skit_pipelines.components.retrain_slu_from_repo.doctree and b/docs/.doctrees/skit_pipelines.components.retrain_slu_from_repo.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.sample_conversations_generator.doctree b/docs/.doctrees/skit_pipelines.components.sample_conversations_generator.doctree
index a69c479f..c43426cb 100644
Binary files a/docs/.doctrees/skit_pipelines.components.sample_conversations_generator.doctree and b/docs/.doctrees/skit_pipelines.components.sample_conversations_generator.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.upload_conv_to_labelstudio.doctree b/docs/.doctrees/skit_pipelines.components.upload_conv_to_labelstudio.doctree
index 36d6f1dc..5192227f 100644
Binary files a/docs/.doctrees/skit_pipelines.components.upload_conv_to_labelstudio.doctree and b/docs/.doctrees/skit_pipelines.components.upload_conv_to_labelstudio.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.components.upload_conversation_data_to_metrics_db.doctree b/docs/.doctrees/skit_pipelines.components.upload_conversation_data_to_metrics_db.doctree
index d8227d43..c49e161f 100644
Binary files a/docs/.doctrees/skit_pipelines.components.upload_conversation_data_to_metrics_db.doctree and b/docs/.doctrees/skit_pipelines.components.upload_conversation_data_to_metrics_db.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.doctree b/docs/.doctrees/skit_pipelines.doctree
index ae44472f..2ba0c5d5 100644
Binary files a/docs/.doctrees/skit_pipelines.doctree and b/docs/.doctrees/skit_pipelines.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.pipelines.doctree b/docs/.doctrees/skit_pipelines.pipelines.doctree
index 3875ff60..32d78b24 100644
Binary files a/docs/.doctrees/skit_pipelines.pipelines.doctree and b/docs/.doctrees/skit_pipelines.pipelines.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.pipelines.generate_and_tag_conversations.doctree b/docs/.doctrees/skit_pipelines.pipelines.generate_and_tag_conversations.doctree
index 50a5e4ee..da754309 100644
Binary files a/docs/.doctrees/skit_pipelines.pipelines.generate_and_tag_conversations.doctree and b/docs/.doctrees/skit_pipelines.pipelines.generate_and_tag_conversations.doctree differ
diff --git a/docs/.doctrees/skit_pipelines.pipelines.invalidate_llm_situations_in_db.doctree b/docs/.doctrees/skit_pipelines.pipelines.invalidate_llm_situations_in_db.doctree
new file mode 100644
index 00000000..8c332ebd
Binary files /dev/null and b/docs/.doctrees/skit_pipelines.pipelines.invalidate_llm_situations_in_db.doctree differ
diff --git a/docs/_modules/index.html b/docs/_modules/index.html
index ee8e860a..cb8caac8 100644
--- a/docs/_modules/index.html
+++ b/docs/_modules/index.html
@@ -59,7 +59,8 @@

All modules for which code is available

  • skit_pipelines.components.gen_asr_metrics
  • skit_pipelines.components.identify_compliance_breaches_llm
  • skit_pipelines.components.merge_transcription
  • + skit_pipelines.components.invalidate_situations_in_db
  • skit_pipelines.components.merge_transcription
  • skit_pipelines.components.modify_entities.duckling_inference
  • skit_pipelines.components.modify_entities.modify_predictions
  • skit_pipelines.components.modify_tagged_entities
  • @@ -103,11 +104,14 @@

    All modules for which code is available

  • skit_pipelines.pipelines.fetch_tagged_entity_dataset
  • skit_pipelines.pipelines.generate_and_tag_conversations
  • skit_pipelines.pipelines.generate_sample_conversations
  • + skit_pipelines.pipelines.invalidate_llm_situations_in_db
  • skit_pipelines.pipelines.publish_compliance_breaches
  • skit_pipelines.pipelines.retrain_slu
  • skit_pipelines.pipelines.retrain_slu_old
  • skit_pipelines.pipelines.tag_calls
  • skit_pipelines.pipelines.transcription_pipeline
  • + skit_pipelines.types.situation_mapping_info
  • + skit_pipelines.types.tag_calls
  • skit_pipelines.utils
  • @@ -325,7 +329,7 @@

    Quick search

    | Powered by Sphinx 4.4.0
-   & Alabaster 0.7.12
+   & Alabaster 0.7.13
    diff --git a/docs/_modules/skit_pipelines/api/models/custom_models.html b/docs/_modules/skit_pipelines/api/models/custom_models.html index e33a1a7c..f5bacb3e 100644 --- a/docs/_modules/skit_pipelines/api/models/custom_models.html +++ b/docs/_modules/skit_pipelines/api/models/custom_models.html @@ -57,7 +57,7 @@

    Source code for skit_pipelines.api.models.custom_models

[docs]def get_kf_object_uri(obj: Dict[str, Any], store="s3") -> str:
     key = obj[store][const.ARTIFACT_URI_KEY]
-    bucket = obj[store].get(const.OBJECT_BUCKET, const.KUBEFLOW_BUCKET)
+    bucket = obj[store].get(const.OBJECT_BUCKET, const.KUBEFLOW_SANDBOX_BUCKET)
     if store == "s3":
         return f"s3://{bucket}/{key}"
     else:
@@ -156,8 +156,11 @@

    Navigation

  • Upload for annotation
  • Download tagged entity dataset
  • Retrain SLU
  • + Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • @@ -196,7 +199,7 @@

    Quick search

    | Powered by Sphinx 4.4.0
-   & Alabaster 0.7.12
+   & Alabaster 0.7.13
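The get_kf_object_uri hunk above swaps the default bucket constant. As a reading aid only, here is a minimal self-contained sketch of what the changed default does at call time; the constant values and the artifact dict below are invented for illustration, only the lookup/fallback logic mirrors the hunk:

from typing import Any, Dict

ARTIFACT_URI_KEY = "key"                         # assumed values of the constants referenced above
OBJECT_BUCKET = "bucket"
KUBEFLOW_SANDBOX_BUCKET = "kubeflow-in-sandbox"  # new default per the diff

def get_kf_object_uri(obj: Dict[str, Any], store: str = "s3") -> str:
    key = obj[store][ARTIFACT_URI_KEY]
    # Fall back to the sandbox bucket when the artifact metadata carries no bucket.
    bucket = obj[store].get(OBJECT_BUCKET, KUBEFLOW_SANDBOX_BUCKET)
    return f"s3://{bucket}/{key}" if store == "s3" else f"{bucket}/{key}"

print(get_kf_object_uri({"s3": {"key": "runs/123/output.csv"}}))
# s3://kubeflow-in-sandbox/runs/123/output.csv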
    diff --git a/docs/_modules/skit_pipelines/api/slack_bot.html b/docs/_modules/skit_pipelines/api/slack_bot.html index de53d914..6692eff5 100644 --- a/docs/_modules/skit_pipelines/api/slack_bot.html +++ b/docs/_modules/skit_pipelines/api/slack_bot.html @@ -336,6 +336,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/api/validate_input.html b/docs/_modules/skit_pipelines/api/validate_input.html index 81c9e606..6e3acbd8 100644 --- a/docs/_modules/skit_pipelines/api/validate_input.html +++ b/docs/_modules/skit_pipelines/api/validate_input.html @@ -89,7 +89,24 @@

    Source code for skit_pipelines.api.validate_input

        if self.pipeline_name == "fetch_n_tag_turns_and_calls" and \
                ("labelstudio_project_id" not in self.payload and "call_project_id" not in self.payload):
            self.errors.append(f"At least one of labelstudio_project_id or call_project_id must be provided.\n")
-
+
+    def _validate_situation_present(self):
+        if self.pipeline_name in ["generate_and_tag_conversations", "generate_sample_conversations"] and "situations" not in self.payload:
+            self.errors.append(f"At least one situation must be provided.\n")
+
+    def _validate_generate_and_tag_conversations_params(self):
+        if self.pipeline_name == "generate_and_tag_conversations":
+            if "template_id" not in self.payload:
+                self.errors.append(f"Parameter template_id required for generate_and_tag_conversations pipeline\n")
+            if "client_id" not in self.payload:
+                self.errors.append(f"Parameter client_id required for generate_and_tag_conversations pipeline\n")
+            if "labelstudio_project_id" not in self.payload:
+                self.errors.append(f"Parameter labelstudio_project_id required for generate_and_tag_conversations pipeline\n")
+            if "scenario" not in self.payload:
+                self.errors.append("Parameter scenario is mandatory for generate_and_tag_conversations pipeline\n")
+            if "scenario_category" not in self.payload:
+                self.errors.append("Parameter scenario_category is mandatory for generate_and_tag_conversations pipeline\n")
+
[docs]    def validate_input_params(self):
        # Universal checks
        self._validate_start_date()
@@ -99,6 +116,9 @@

    Source code for skit_pipelines.api.validate_input

        # Pipeline specific checks
        self._validate_repo_for_retrain_slu()
        self._validate_label_studio_ids_for_fetch_n_tag_turns_calls()
+       self._validate_situation_present()
+       self._validate_generate_and_tag_conversations_params()
+
        return self.errors
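To make the effect of the new checks concrete, a small hedged sketch follows; the payload values are invented and the loop is a simplification, only the required-parameter list comes from the hunk above:

# Hypothetical request payload for the generate_and_tag_conversations pipeline.
payload = {"situations": ["caller asks to reschedule an EMI payment"], "client_id": 42}

errors = []
required = ["template_id", "client_id", "labelstudio_project_id", "scenario", "scenario_category"]
for param in required:
    if param not in payload:
        errors.append(f"Parameter {param} required for generate_and_tag_conversations pipeline\n")

# errors now flags template_id, labelstudio_project_id, scenario and scenario_category,
# roughly what validate_input_params would report for this payload.
print(errors)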
    @@ -129,6 +149,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/evaluate_slu_from_repo.html b/docs/_modules/skit_pipelines/components/evaluate_slu_from_repo.html index 44aae3ad..3c3874f8 100644 --- a/docs/_modules/skit_pipelines/components/evaluate_slu_from_repo.html +++ b/docs/_modules/skit_pipelines/components/evaluate_slu_from_repo.html @@ -161,6 +161,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/fetch_tagged_dataset.html b/docs/_modules/skit_pipelines/components/fetch_tagged_dataset.html index 6b38484e..28967bfb 100644 --- a/docs/_modules/skit_pipelines/components/fetch_tagged_dataset.html +++ b/docs/_modules/skit_pipelines/components/fetch_tagged_dataset.html @@ -51,7 +51,6 @@

    Source code for skit_pipelines.components.fetch_tagged_dataset

    end_date_offset: Optional[int] = None,
    empty_possible: bool = False,
):
-   import asyncio
    import time
    import pandas as pd
@@ -60,21 +59,12 @@

    Source code for skit_pipelines.components.fetch_tagged_dataset

    from skit_calls.cli import process_date_filters
    from skit_labels import constants as const
    from skit_labels import utils
-   from skit_labels.commands import (
-       download_dataset_from_db,
-       download_dataset_from_labelstudio,
-   )
+   from skit_labels.commands import download_dataset_from_db
    from skit_pipelines import constants as pipeline_constants
    from skit_pipelines.utils.normalize import comma_sep_str

    utils.configure_logger(7)
-
-   host = pipeline_constants.DB_HOST
-   port = pipeline_constants.DB_PORT
-   password = pipeline_constants.DB_PASSWORD
-   user = pipeline_constants.DB_USER
-
    if not timezone:
        timezone = pipeline_constants.TIMEZONE
@@ -99,10 +89,10 @@

    Source code for skit_pipelines.components.fetch_tagged_dataset

            timezone=pytz.timezone(timezone) if timezone else None,
            start_date=start_date or None,
            end_date=end_date or None,
-           host=host,
-           port=port,
-           password=password,
-           user=user,
+           host=pipeline_constants.DB_HOST,
+           port=pipeline_constants.DB_PORT,
+           password=pipeline_constants.DB_PASSWORD,
+           user=pipeline_constants.DB_USER,
            db=const.LABELSTUIO_DB if project_id else "tog",
        )
        df_paths.append(df_path)
@@ -148,8 +138,11 @@

    Navigation

  • Upload for annotation
  • Download tagged entity dataset
  • Retrain SLU
  • + Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • @@ -188,7 +181,7 @@

    Quick search

    | Powered by Sphinx 4.4.0
-   & Alabaster 0.7.12
+   & Alabaster 0.7.13
diff --git a/docs/_modules/skit_pipelines/components/invalidate_situations_in_db.html b/docs/_modules/skit_pipelines/components/invalidate_situations_in_db.html
new file mode 100644
index 00000000..6274271c
--- /dev/null
+++ b/docs/_modules/skit_pipelines/components/invalidate_situations_in_db.html
@@ -0,0 +1,149 @@
+ skit_pipelines.components.invalidate_situations_in_db — skit_pipelines documentation
    +
    +

    Source code for skit_pipelines.components.invalidate_situations_in_db

    +
    +import kfp
    +
    +from skit_pipelines import constants as pipeline_constants
    +
    +
+[docs]def invalidate_situations_in_db(situation_id):
+    """
+    Mark each situation id in the comma-separated situation_id string as invalid in the metrics DB.
+    """
+    from skit_pipelines import constants as pipeline_constants
+    from skit_pipelines.components.invalidate_situations_in_db.queries import UPDATE_IS_VALID_SITUATION
+
+    from loguru import logger
+    import psycopg2
+
+    situation_id_list = [int(val.strip()) for val in situation_id.split(',')]
+
+    logger.info(f"Situation id list: {situation_id_list}")
+
+    conn = psycopg2.connect(
+        dbname=pipeline_constants.ML_METRICS_DB_NAME,
+        user=pipeline_constants.ML_METRICS_DB_USER,
+        password=pipeline_constants.ML_METRICS_DB_PASSWORD,
+        host=pipeline_constants.ML_METRICS_DB_HOST,
+        port=pipeline_constants.ML_METRICS_DB_PORT,
+    )
+
+    for id_value in situation_id_list:
+        cur = conn.cursor()
+        cur.execute(UPDATE_IS_VALID_SITUATION, (id_value,))
+        conn.commit()
+
+    cur.close()
+    conn.close()
+
+invalidate_situations_in_db_op = kfp.components.create_component_from_func(
+    invalidate_situations_in_db, base_image=pipeline_constants.BASE_IMAGE
+)
+
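As a reading aid only, a hedged sketch of what the new component's core loop does once Kubeflow invokes it with a comma-separated id string; the table and column names in the query below are invented for the example, the real text lives in invalidate_situations_in_db/queries.py:

import psycopg2  # same driver the component uses

# Assumed query shape; the component imports the real UPDATE_IS_VALID_SITUATION from queries.py.
UPDATE_IS_VALID_SITUATION = "UPDATE situations SET is_valid = FALSE WHERE id = %s"

def invalidate(situation_id: str, conn) -> None:
    # "12, 15,18" -> [12, 15, 18]
    ids = [int(val.strip()) for val in situation_id.split(",")]
    for id_value in ids:
        with conn.cursor() as cur:
            cur.execute(UPDATE_IS_VALID_SITUATION, (id_value,))
        conn.commit()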
\ No newline at end of file
diff --git a/docs/_modules/skit_pipelines/components/retrain_slu_from_repo.html b/docs/_modules/skit_pipelines/components/retrain_slu_from_repo.html
index f8726572..16d29c23 100644
--- a/docs/_modules/skit_pipelines/components/retrain_slu_from_repo.html
+++ b/docs/_modules/skit_pipelines/components/retrain_slu_from_repo.html
@@ -224,6 +224,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/retrain_slu_from_repo/comparision_report_generator.html b/docs/_modules/skit_pipelines/components/retrain_slu_from_repo/comparision_report_generator.html index 2e16b5e2..1ca2d160 100644 --- a/docs/_modules/skit_pipelines/components/retrain_slu_from_repo/comparision_report_generator.html +++ b/docs/_modules/skit_pipelines/components/retrain_slu_from_repo/comparision_report_generator.html @@ -110,7 +110,7 @@

Source code for skit_pipelines.components.retrain_slu_from_repo.comparision_report_generator

        columns=["precision", "recall", "f1-score", "support"],
    )
-   comparison_df.to_csv(output_path, index=False)
+   comparison_df.to_csv(output_path)

    # Print the comparison report using tabulate for better formatting
    print(tabulate(comparison_df, headers="keys", tablefmt="psql"))
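An illustrative note on this change (the data below is invented): the comparison frame carries its row labels in the index, so dropping index=False keeps those labels as the first column of the CSV instead of discarding them.

import pandas as pd

comparison_df = pd.DataFrame(
    {"precision": [0.91, 0.84], "recall": [0.88, 0.80], "f1-score": [0.89, 0.82], "support": [120, 75]},
    index=["_confirm_", "_cancel_"],  # hypothetical row labels
)
comparison_df.to_csv("comparison_report.csv")  # the index (the labels) is written as the first column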

    @@ -170,6 +170,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/retrain_slu_from_repo_old.html b/docs/_modules/skit_pipelines/components/retrain_slu_from_repo_old.html index 7ce775a8..50d61e18 100644 --- a/docs/_modules/skit_pipelines/components/retrain_slu_from_repo_old.html +++ b/docs/_modules/skit_pipelines/components/retrain_slu_from_repo_old.html @@ -392,6 +392,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/upload_conversation_data_to_metrics_db.html b/docs/_modules/skit_pipelines/components/upload_conversation_data_to_metrics_db.html index 4c996524..d91e3b05 100644 --- a/docs/_modules/skit_pipelines/components/upload_conversation_data_to_metrics_db.html +++ b/docs/_modules/skit_pipelines/components/upload_conversation_data_to_metrics_db.html @@ -84,7 +84,7 @@

Source code for skit_pipelines.components.upload_conversation_data_to_metrics_db

    prompt_s3_path = upload_file_to_s3(
        path_on_disk=prompt_local_path,
        upload_path=s3_prompt_dir_name,
-       bucket=pipeline_constants.KUBEFLOW_BUCKET,
+       bucket=pipeline_constants.KUBEFLOW_SANDBOX_BUCKET,
    )

    logger.info(f"Prompt local path: {prompt_local_path}")
@@ -169,6 +169,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/upload_for_call_and_slot_tagging.html b/docs/_modules/skit_pipelines/components/upload_for_call_and_slot_tagging.html index df5eca9f..d3711b81 100644 --- a/docs/_modules/skit_pipelines/components/upload_for_call_and_slot_tagging.html +++ b/docs/_modules/skit_pipelines/components/upload_for_call_and_slot_tagging.html @@ -59,6 +59,7 @@

Source code for skit_pipelines.components.upload_for_call_and_slot_tagging

        "language",
        "call_end_status",
        "disposition",
+       "previous_disposition",
        "flow_id",
        "flow_version",
        "flow_name",
@@ -138,8 +139,11 @@

    Navigation

  • Upload for annotation
  • Download tagged entity dataset
  • Retrain SLU
  • + Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • @@ -178,7 +182,7 @@

    Quick search

    | Powered by Sphinx 4.4.0
-   & Alabaster 0.7.12
+   & Alabaster 0.7.13
    diff --git a/docs/_modules/skit_pipelines/components/utils.html b/docs/_modules/skit_pipelines/components/utils.html index d48d1e20..a8bfb54e 100644 --- a/docs/_modules/skit_pipelines/components/utils.html +++ b/docs/_modules/skit_pipelines/components/utils.html @@ -181,6 +181,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/utils_slu.html b/docs/_modules/skit_pipelines/components/utils_slu.html index b84531ed..f10b4f18 100644 --- a/docs/_modules/skit_pipelines/components/utils_slu.html +++ b/docs/_modules/skit_pipelines/components/utils_slu.html @@ -328,6 +328,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/validate_and_add_situations_to_db.html b/docs/_modules/skit_pipelines/components/validate_and_add_situations_to_db.html index f1418ae2..14f551ab 100644 --- a/docs/_modules/skit_pipelines/components/validate_and_add_situations_to_db.html +++ b/docs/_modules/skit_pipelines/components/validate_and_add_situations_to_db.html @@ -53,7 +53,8 @@

Source code for skit_pipelines.components.validate_and_add_situations_to_db

    logger.info(f"Situations: {situations}")
    logger.info(f"scenario: {scenario}")
    logger.info(f"scenario_category: {scenario_category}")
-
+   if not scenario or not scenario_category:
+       raise Exception(f"Either scenario or scenario_category is empty. Please pass in the values for the same")

    conn = psycopg2.connect(
        dbname=pipeline_constants.ML_METRICS_DB_NAME,
@@ -71,23 +72,24 @@

Source code for skit_pipelines.components.validate_and_add_situations_to_db

    cur.execute(CREATE_SITUATIONS_MAPPING_TABLE_QUERY)
    conn.commit()
-
    for situation in situations:
        situation_info = {}
        situation = situation.lower()
        cur = conn.cursor()
-       cur.execute(SEARCH_SITUATION_QUERY, (situation,))
+       scenario_category = scenario_category.upper()
+       scenario = scenario.lower()
+       query_parameters = {
+           "situation": situation,
+           "scenario": scenario,
+           "scenario_category" :scenario_category
+       }
+       cur.execute(SEARCH_SITUATION_QUERY, query_parameters)
        record = cur.fetchone()

        if record:
            id_val = record[0]
            logger.info(f"ID in table: {id_val}")
        else:
-
-           scenario_category = scenario_category.upper()
-           scenario = scenario.lower()
-
-           query_parameters = {"situation": situation, "scenario": scenario, "scenario_category" :scenario_category}
            cur.execute(INSERT_SITUATION_QUERY, query_parameters)
            conn.commit()
@@ -144,6 +146,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/components/zip_files_and_notify.html b/docs/_modules/skit_pipelines/components/zip_files_and_notify.html index 35170a5b..65f60fe1 100644 --- a/docs/_modules/skit_pipelines/components/zip_files_and_notify.html +++ b/docs/_modules/skit_pipelines/components/zip_files_and_notify.html @@ -78,8 +78,8 @@

    Source code for skit_pipelines.components.zip_files_and_notify

    all_files = [file for file in all_files if not file.endswith('.csv')]
    if 'prompt.txt' in all_files:
        all_files.remove('prompt.txt')
-   if 'situation.txt' in all_files:
-       all_files.remove('situation.txt')
+   if 'situation.json' in all_files:
+       all_files.remove('situation.json')

    random.shuffle(all_files)
    selected_file = all_files[0]
@@ -163,6 +163,8 @@

    Navigation

  • Publish Complaince Breaches
  • Transcribe Dataset
  • Random sample and tag turns and calls
  • + Generate sample conversations for LLMs
  • + Generate conversations for LLMS and upload it for tagging
  • diff --git a/docs/_modules/skit_pipelines/constants.html b/docs/_modules/skit_pipelines/constants.html index 108c183f..31fc783d 100644 --- a/docs/_modules/skit_pipelines/constants.html +++ b/docs/_modules/skit_pipelines/constants.html @@ -101,6 +101,7 @@

    Source code for skit_pipelines.constants

     TEST = "test"
     LABELS = "labels"
     CSV_FILE = ".csv"
    +TEXT_FILE = ".txt"
     WAV_FILE = ".wav"
     UTTERANCES = "utterances"
     ALTERNATIVES = "alternatives"
    @@ -151,7 +152,6 @@ 

    Source code for skit_pipelines.constants

     COOKIES_PATH = "/tmp/kf_cookies.json"
     ACCESS_TOKEN_PATH = "/tmp/kfp_server_token.json"
     KUBEFLOW_GATEWAY_ENDPOINT = os.environ["KUBEFLOW_GATEWAY_ENDPOINT"]
    -KUBEFLOW_BUCKET = "kubeflow-skit" if REGION == AP_SOUTH_1 else "kubeflow-us-cluster"
     COOKIE_0 = "AWSELBAuthSessionCookie-0"
     COOKIE_1 = "AWSELBAuthSessionCookie-1"
     COOKIE_DICT = {COOKIE_0: None, COOKIE_1: None}
    @@ -163,6 +163,9 @@ 

    Source code for skit_pipelines.constants

     )
     LABELSTUDIO_TOKEN = os.environ["LABELSTUDIO_TOKEN"]
     
    +"Kubeflow doesnt follow a staging/production cycle so a single implementation accesses all possible buckets"
    +KUBEFLOW_BUCKET = "kubeflow-skit" if REGION == AP_SOUTH_1 else "kubeflow-us-cluster"
    +KUBEFLOW_SANDBOX_BUCKET = "kubeflow-in-sandbox" if REGION == AP_SOUTH_1 else "kubeflow-us-sandbox"
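To close, a runnable sketch of the region-keyed selection these two constants perform; the AP_SOUTH_1 and REGION values below are stand-ins (in the repo REGION comes from the environment), only the bucket names are copied from the hunk:

AP_SOUTH_1 = "ap-south-1"   # assumed value of the region constant
REGION = AP_SOUTH_1         # would normally be read from the deployment environment

KUBEFLOW_BUCKET = "kubeflow-skit" if REGION == AP_SOUTH_1 else "kubeflow-us-cluster"
KUBEFLOW_SANDBOX_BUCKET = "kubeflow-in-sandbox" if REGION == AP_SOUTH_1 else "kubeflow-us-sandbox"

print(KUBEFLOW_BUCKET, KUBEFLOW_SANDBOX_BUCKET)
# kubeflow-skit kubeflow-in-sandbox  (a non ap-south-1 region would select the *-us-* pair)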