From f0291290540b9dbef6dd97b9169fad3e83f18535 Mon Sep 17 00:00:00 2001 From: vumichien Date: Thu, 25 Apr 2024 11:07:29 +0900 Subject: [PATCH 1/3] revise testcase using additional files from Simon --- data/clean/f_657_simon_chien_edit.py | 144 ++++++++++++++++++++ data/clean/f_661_simon_chien_edit.py | 129 ++++++++++++++++++ data/clean/f_663_simon_chien_edit.py | 193 +++++++++++++++++++++ data/clean/f_674_simon_chien_edit.py | 132 ++++++++++++++++++ data/clean/f_675_simon_chien_edit.py | 158 ++++++++++++++++++++ data/clean/f_694_simon_chien_edit.py | 116 ++++++++++++++++ data/clean/f_723_simon_chien_edit.py | 137 +++++++++++++++++++ data/clean/f_724_simon_chien_edit.py | 146 ++++++++++++++++++++ data/clean/f_725_simon_chien_edit.py | 122 +++++++++++++++++ data/clean/f_728_simon_chien_edit.py | 176 ++++++++++++++++++++++++ data/clean/f_730_simon_chien_edit.py | 128 ++++++++++++++++++ data/clean/f_731_simon_chien_edit.py | 151 +++++++++++++++++++++ data/clean/f_732_simon_chien_edit.py | 143 ++++++++++++++++++++ data/clean/f_733_simon_chien_edit.py | 126 +++++++++++++++++ 14 files changed, 2001 insertions(+) create mode 100644 data/clean/f_657_simon_chien_edit.py create mode 100644 data/clean/f_661_simon_chien_edit.py create mode 100644 data/clean/f_663_simon_chien_edit.py create mode 100644 data/clean/f_674_simon_chien_edit.py create mode 100644 data/clean/f_675_simon_chien_edit.py create mode 100644 data/clean/f_694_simon_chien_edit.py create mode 100644 data/clean/f_723_simon_chien_edit.py create mode 100644 data/clean/f_724_simon_chien_edit.py create mode 100644 data/clean/f_725_simon_chien_edit.py create mode 100644 data/clean/f_728_simon_chien_edit.py create mode 100644 data/clean/f_730_simon_chien_edit.py create mode 100644 data/clean/f_731_simon_chien_edit.py create mode 100644 data/clean/f_732_simon_chien_edit.py create mode 100644 data/clean/f_733_simon_chien_edit.py diff --git a/data/clean/f_657_simon_chien_edit.py b/data/clean/f_657_simon_chien_edit.py new file mode 100644 index 00000000..1110b032 --- /dev/null +++ b/data/clean/f_657_simon_chien_edit.py @@ -0,0 +1,144 @@ +import re +import os +import glob + + +def f_657(dir_path): + """ + Search for occurrences of the word "error" in all text files within a + specified directory and its subdirectories. + + Parameters: + dir_path (str): The path of the directory. + + Returns: + dict: A dictionary with relative file paths as keys and the count of + occurrences of the word "error" as values. + + Raises: + - ValueError: If the directory given by dir_path does not exist. + + Requirements: + - re: For regex pattern matching. + - os: For retrieving relative file paths. + - glob: For fetching all text file paths in the directory. + + The function searches for the word "error" only in text files + (with the extension ".txt"). + The search is NOT case sensitive, so e.g. "ERROr" is also counted.
+ + Example: + >>> f_657("/path/to/directory") + {'file1.txt': 2, 'subdir/file2.txt': 1} + + >>> f_657("/path/to/directory") + {'test.txt': 245, 'subdir/test2.txt': 0, 'subdir/sf/test3.txt': 1} + """ + + if not os.path.isdir(dir_path): + raise ValueError("Specified directory does not exist.") + + result = {} + file_paths = glob.glob(f'{dir_path}/**/*.txt', recursive=True) + for file_path in file_paths: + with open(file_path, 'r') as file: + content = file.read() + matches = re.findall(r'\berror\b', content, re.IGNORECASE) + # Always set the file's count in the result dictionary, even if it's 0 + result[os.path.relpath(file_path, dir_path)] = len(matches) + + return result + + +import unittest +import os +import shutil +import tempfile + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create a temporary directory to simulate test environments + self.test_dir = tempfile.mkdtemp() + + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + + def create_file(self, sub_path, content=""): + # Helper method to create a file with given content + full_path = os.path.join(self.test_dir, sub_path) + os.makedirs(os.path.dirname(full_path), exist_ok=True) + with open(full_path, 'w') as file: + file.write(content) + # Return normalized path for cross-platform compatibility + return os.path.normpath(sub_path) + + def test_non_existent(self): + # Expect ValueError for non-existent directory + with self.assertRaises(ValueError): + f_657(os.path.join(self.test_dir, "non_existent")) + + def test_empty_folder(self): + # Test empty directory + result = f_657(self.test_dir) + self.assertEqual(result, {}) + + def test_files_with_errors(self): + # Files with varying counts of 'error' + files = { + "1.txt": "error\nERROR\nErrOr", + "subfolder1/2.txt": "", + "subfolder2/3.txt": "error\nerror error" + } + expected = { + os.path.normpath("1.txt"): 3, + os.path.normpath("subfolder1/2.txt"): 0, + os.path.normpath("subfolder2/3.txt"): 3 + } + for path, content in files.items(): + self.create_file(path, content) + + result = f_657(self.test_dir) + self.assertEqual(result, expected) + + def test_case_sensitive_and_realistic_text(self): + # More complex scenarios, including nested directories + file_path = self.create_file('nested/folder1/folder2/error_log.txt', 'Error\nerror\nERROR') + expected = {file_path: 3} + result = f_657(self.test_dir) + self.assertEqual(result, expected) + + def test_exact_word_matching(self): + # Ensure only the exact word 'error' is counted and ignore similar words like 'errors' + files = { + "file1.txt": "error error error", # Should count 3 times + "subdir/file2.txt": "errors error erro errors", # Should count 1 time + "subdir2/nested/file3.txt": "an error occurred", # Should count 1 time + "subdir3/file4.txt": "no errors here", # Should count 0 times + "subdir3/file5.txt": "Error and ERROR and error" # Should count 3 times, case insensitive + } + expected = { + os.path.normpath("file1.txt"): 3, + os.path.normpath("subdir/file2.txt"): 1, + os.path.normpath("subdir2/nested/file3.txt"): 1, + os.path.normpath("subdir3/file4.txt"): 0, + os.path.normpath("subdir3/file5.txt"): 3 + } + for path, content in files.items(): + self.create_file(path, content) + + result = f_657(self.test_dir) + self.assertEqual(result, expected) + + +if __name__ == "__main__": + run_tests() diff --git 
a/data/clean/f_661_simon_chien_edit.py b/data/clean/f_661_simon_chien_edit.py new file mode 100644 index 00000000..a03c5c79 --- /dev/null +++ b/data/clean/f_661_simon_chien_edit.py @@ -0,0 +1,129 @@ +import pandas as pd +from sklearn.linear_model import LinearRegression + + +def f_661(file_path, output_path=None, sort_key='title', linear_regression=False, x_column=None, y_column=None): + """ + Sorts a CSV file by a specific column key using pandas, and optionally writes the sorted data to another CSV file. + Can also fit a linear regression model to specified columns if required. + + Parameters: + file_path (str): The path to the input CSV file. This parameter is required. + output_path (str): The path where the sorted CSV will be saved. If not provided, the function won't save the sorted dataframe. + sort_key (str): The column name used as a key to sort the CSV file. Defaults to 'title'. + linear_regression (bool): If True, fits a linear regression model to the specified columns. Defaults to False. + x_column (str): The name of the column to use as the predictor variable for linear regression. + y_column (str): The name of the column to use as the response variable for linear regression. + + Returns: + DataFrame, str, or LinearRegression: + - The fitted LinearRegression model, if 'linear_regression' is True. + - Otherwise, the path to the saved output file, if 'output_path' is provided. + - Otherwise, the sorted pandas DataFrame. + + Requirements: + - pandas + - scikit-learn + + Example: + >>> model = f_661('data.csv', sort_key='title', linear_regression=True, x_column='age', y_column='salary') + >>> # Returns a fitted LinearRegression model based on 'age' and 'salary' columns. + + Raises: + Exception: If there is an error while reading or sorting the data, or while fitting the model.
+ """ + try: + df = pd.read_csv(file_path) + df.sort_values(by=[sort_key], inplace=True) + + if linear_regression: + if x_column not in df.columns or y_column not in df.columns: + raise ValueError("Specified columns for linear regression do not exist in the dataframe") + + X = df[[x_column]] + y = df[y_column] + model = LinearRegression().fit(X, y) + return model + + if output_path: + df.to_csv(output_path, index=False) + return output_path + else: + return df + except Exception as e: + raise Exception(f"Error while processing the file: {str(e)}") + + +import unittest +import pandas as pd +import numpy as np +import os +import shutil +import tempfile + + +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory for test files + self.test_dir = tempfile.mkdtemp() + self.test_csv_path = os.path.join(self.test_dir, 'test_data.csv') + # Create a sample CSV file + df = pd.DataFrame({ + 'title': ['Book C', 'Book A', 'Book B'], + 'x': [1, 2, 3], + 'y': [5, 7, 9] + }) + df.to_csv(self.test_csv_path, index=False) + + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + + def test_valid_input_no_output_path(self): + # Test with valid input, no output file specified (should return DataFrame) + df = f_661(self.test_csv_path, sort_key='title') + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue(df['title'].is_monotonic_increasing) + + def test_invalid_file_path(self): + # Test with invalid file path (should raise an exception) + with self.assertRaises(Exception): + f_661(os.path.join(self.test_dir, 'non_existent.csv')) + + def test_invalid_sort_key(self): + # Test with invalid sort key (should raise an exception) + with self.assertRaises(Exception): + f_661(self.test_csv_path, sort_key='non_existent_column') + + def test_output_data_saving(self): + # Test if the function saves the sorted data correctly when an output path is provided + output_path = os.path.join(self.test_dir, 'sorted_data.csv') + result_path = f_661(self.test_csv_path, output_path=output_path, sort_key='title') + self.assertEqual(result_path, output_path) + # Check if the file is created and is not empty + self.assertTrue(os.path.exists(output_path)) + self.assertGreater(os.stat(output_path).st_size, 0) + + def test_linear_regression_functionality(self): + # Test if linear regression model is fitted correctly + model = f_661(self.test_csv_path, linear_regression=True, x_column='x', y_column='y') + self.assertIsInstance(model, LinearRegression) + # Check if coefficients are as expected (approximate) + np.testing.assert_almost_equal(model.coef_, [2], decimal=1) + np.testing.assert_almost_equal(model.intercept_, 3, decimal=1) + + def test_linear_regression_error_on_invalid_columns(self): + # Test error handling for non-existent columns in linear regression + with self.assertRaises(Exception) as context: + f_661(self.test_csv_path, linear_regression=True, x_column='nonexistent', y_column='title') + self.assertIn("Specified columns for linear regression do not exist in the dataframe", str(context.exception)) + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +# This is required to run the test cases +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_663_simon_chien_edit.py b/data/clean/f_663_simon_chien_edit.py new file mode 100644 index 00000000..c7ff2f53 --- /dev/null +++ b/data/clean/f_663_simon_chien_edit.py @@ -0,0 +1,193 
@@ +import pandas as pd +import pytz + + +def f_663(articles, timezone): + """ + Analyze the publication times of a list of articles: + 1) Convert 'published_time' to a specified timezone + 2) Group articles by 'category' + 3) For each category, calculate the count, mean, min, and max of the publication hour. + + Parameters: + articles (list): A list of dictionaries where each dictionary represents + an article with keys 'title', 'title_url', 'id', 'category', and 'published_time' (in UTC). + timezone (str): The string representation of the timezone to which the 'published_time' should be converted. + + Returns: + DataFrame: A pandas DataFrame with the count, mean, min, max publication hour for each category. + The category is the index of the DataFrame. + + Raises: + ValueError: If dictionary keys do not match the requirements. + TypeError: If articles is not a list of dictionaries. + ValueError: If an empty list is passed as articles. + + Requirements: + - pandas + - pytz + + Example: + >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)}, + ... {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)}, + ... {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}] + >>> analysis_df = f_663(articles, 'America/New_York') + >>> print(analysis_df) + count mean min max + category + Health 1 3.0 3 3 + Sports 1 19.0 19 19 + Technology 1 8.0 8 8 + + >>> articles = [ + ... {'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': '09:01:04.403278+00:00'}, + ... {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': '02:03:04.403278+00:00'}, + ... {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': '21:11:01.403278+00:00'}, + ... {'title': 'newsies', 'title_url': 'newsies.news', 'id': 21, 'category': 'Technology', 'published_time': '4:25:12.403278+00:00'}, + ... {'title': 'ORF', 'title_url': 'orf.at', 'id': 44, 'category': 'Health', 'published_time': '03:04:03.403278+00:00'}, + ... 
{'title': 'ARD', 'title_url': 'ard.com', 'id': 61, 'category': 'Health', 'published_time': '11:41:12.403278+00:00'}] + >>> analysis_df = f_663(articles, 'America/New_York') + >>> print(analysis_df) + count mean min max + category + Health 3 15.666667 7 23 + Sports 1 22.000000 22 22 + Technology 2 2.500000 0 5 + """ + + if not isinstance(articles, list): + raise TypeError("articles should be a list of dictionaries.") + + if not all(isinstance(item, dict) for item in articles): + raise TypeError("articles should be a list of dictionaries.") + + if len(articles) == 0: + raise ValueError("input articles list should contain at least one article.") + + if any(not sorted(dic.keys()) == ['category', 'id', 'published_time', 'title', 'title_url'] for dic in articles): + raise ValueError( + "input dictionaries must contain the following keys: 'category', 'id', 'title', 'title_url', 'published_time'") + + tz = pytz.timezone(timezone) + for article in articles: + article['published_time'] = pd.to_datetime(article['published_time']).astimezone(tz) + + df = pd.DataFrame(articles) + df['published_time'] = df['published_time'].dt.hour + + analysis_df = df.groupby('category')['published_time'].agg(['count', 'mean', 'min', 'max']) + + return analysis_df + + +import unittest +import pandas as pd +import pytz +from datetime import datetime + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + self.articles = [ + {'title': 'Apple News', 'title_url': 'apple.com/news', 'id': 1, 'category': 'Technology', + 'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.UTC)}, + {'title': 'Sports Update', 'title_url': 'sports.com/update', 'id': 2, 'category': 'Sports', + 'published_time': datetime(2023, 1, 1, 15, 0, tzinfo=pytz.UTC)}, + {'title': 'Health Today', 'title_url': 'health.com/today', 'id': 3, 'category': 'Health', + 'published_time': datetime(2023, 1, 1, 8, 0, tzinfo=pytz.UTC)} + ] + + def test_empty_articles_list(self): + # Test handling of empty list + with self.assertRaises(ValueError): + f_663([], 'America/New_York') + + def test_invalid_article_format(self): + # Test handling of improperly formatted articles list + with self.assertRaises(ValueError): + f_663([{'wrong_key': 'wrong_value'}], 'America/New_York') + + def test_conversion_and_grouping(self): + timezone = 'America/New_York' + result_df = f_663(self.articles, timezone) + + expected_data = { + 'count': {'Health': 1, 'Sports': 1, 'Technology': 1}, + 'mean': {'Health': 3.0, 'Sports': 10.0, 'Technology': 7.0}, + 'min': {'Health': 3, 'Sports': 10, 'Technology': 7}, + 'max': {'Health': 3, 'Sports': 10, 'Technology': 7} + } + expected_df = pd.DataFrame(expected_data) + # Ensure the data types match, especially for integer columns + expected_df = expected_df.astype({ + 'min': 'int32', + 'max': 'int32', + 'count': 'int64', + 'mean': 'float64' + }) + expected_df.index.name = 'category' + + pd.testing.assert_frame_equal(result_df, expected_df) + + def test_article_timezone_conversion(self): + # Assuming test data has UTC as the base timezone and checking against London timezone + result = f_663(self.articles, 'Europe/London') + expected_hours = [8.0, 15.0, 12.0] + actual_hours = result.reset_index()['mean'].tolist() + self.assertEqual(expected_hours, actual_hours) + + def test_different_timezones_across_categories(self): + # Create a set of articles across different categories and timezones + 
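# (Note, an observed pytz quirk the expectations below rely on: passing + # pytz.timezone(...) directly as tzinfo attaches the zone's historical + # local-mean-time offset, e.g. +9:19 for Asia/Tokyo and -7:53 for + # America/Los_Angeles, rather than the modern UTC offset.) + 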
articles = [ + {'title': 'Tech Trends', 'title_url': 'tech.com/trends', 'id': 1, 'category': 'Technology', + 'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('UTC'))}, + {'title': 'World Sports', 'title_url': 'sports.com/world', 'id': 2, 'category': 'Sports', + 'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('Asia/Tokyo'))}, # +9 hours from UTC + {'title': 'Health News', 'title_url': 'health.com/news', 'id': 3, 'category': 'Health', + 'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('America/Los_Angeles'))} + # -8 hours from UTC + ] + timezone = 'America/New_York' # UTC-5 + result_df = f_663(articles, timezone) + + expected_data = { + 'count': {'Health': 1, 'Sports': 1, 'Technology': 1}, + 'mean': {'Health': 14.0, 'Sports': 21.0, 'Technology': 7.0}, + # Converting 12:00 from respective timezones to New York time + 'min': {'Health': 14, 'Sports': 21, 'Technology': 7}, + 'max': {'Health': 14, 'Sports': 21, 'Technology': 7} + } + expected_df = pd.DataFrame(expected_data) + expected_df.index.name = 'category' + + expected_df = expected_df.astype({ + 'min': 'int32', + 'max': 'int32', + 'count': 'int64', + 'mean': 'float64' + }) + + pd.testing.assert_frame_equal(result_df, expected_df) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_674_simon_chien_edit.py b/data/clean/f_674_simon_chien_edit.py new file mode 100644 index 00000000..02d37501 --- /dev/null +++ b/data/clean/f_674_simon_chien_edit.py @@ -0,0 +1,132 @@ +import collections +import numpy as np + + +def f_674(file_name): + """ + Find the most common value in each column of a csv file with column names. + + If some values occur the same number of times, the values are sorted + alphabetically and the first is considered most common. + + If an empty csv is passed, an empty dictionary is returned. + + Parameters: + file_name (str): The name of the csv file. + + Returns: + dict: A dictionary with column names as keys and most common values as values. 
+ + Requirements: + - collections + - numpy + + Example: + >>> common_values = f_674('sample.csv') + >>> print(common_values) + {'Name': 'Simon Velasquez', + 'Age': 21, + 'Fruit': 'Apple', + 'Genre': 'HipHop', + 'Height': 172} + + >>> common_values = f_674('test.csv') + >>> print(common_values) + {'Object': 'Chair', + 'Weight': '211kg', + 'Dancing Style': 'Waltz'} + """ + data = np.genfromtxt(file_name, delimiter=',', names=True, + dtype=None, encoding=None) + common_values = {} + + if len(np.atleast_1d(data)) == 0: + return {} + + if len(np.atleast_1d(data)) == 1: + for col in data.dtype.names: + common_values[col] = data[col].item() + + else: + for col in data.dtype.names: + counter = collections.Counter(data[col]) + # Find the highest count, then break ties by taking the + # alphabetically first value among those sharing that count + max_count = counter.most_common(1)[0][1] + candidates = sorted(value for value, count in counter.items() if count == max_count) + common_values[col] = candidates[0] + + return common_values + + +import unittest +import os +import shutil +import tempfile +import csv + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create a temporary directory to house the CSV files + self.test_dir = tempfile.mkdtemp() + + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + + def create_csv(self, file_name, headers, data): + # Helper function to create a CSV file + path = os.path.join(self.test_dir, file_name) + with open(path, 'w', newline='') as csvfile: + writer = csv.DictWriter(csvfile, fieldnames=headers) + writer.writeheader() + for row in data: + writer.writerow(row) + return path + + def test_empty_csv(self): + # Test for an empty CSV file + file_path = self.create_csv('empty.csv', ['Name', 'Age'], []) + result = f_674(file_path) + self.assertEqual(result, {}) + + def test_single_entry(self): + # Test for a CSV file with a single entry + file_path = self.create_csv('single.csv', ['Name', 'Age'], [{'Name': 'John', 'Age': '30'}]) + result = f_674(file_path) + self.assertEqual(result, {'Name': 'John', 'Age': 30}) + + def test_common_values_sorted(self): + # Test for common values, ensuring alphabetical sorting + file_path = self.create_csv('common_values.csv', ['Fruit'], [{'Fruit': 'Apple'}, {'Fruit': 'Banana'}, {'Fruit': 'Apple'}, {'Fruit': 'Banana'}, {'Fruit': 'Cherry'}]) + result = f_674(file_path) + self.assertEqual(result, {'Fruit': 'Apple'}) + + def test_multiple_columns(self): + # Test for multiple columns and entries + data = [{'Name': 'Alice', 'Age': '25', 'Country': 'USA'}, + {'Name': 'Bob', 'Age': '30', 'Country': 'USA'}, + {'Name': 'Alice', 'Age': '25', 'Country': 'Canada'}] + file_path = self.create_csv('multi_columns.csv', ['Name', 'Age', 'Country'], data) + result = f_674(file_path) + expected = {'Name': 'Alice', 'Age': 25, 'Country': 'USA'} + self.assertEqual(result, expected) + + def test_tie_breaking(self): + # Test for tie-breaking in value counts + data = [{'Name': 'Alice'}, {'Name': 'Bob'}, {'Name': 'Alice'}, {'Name': 'Bob'}] + file_path = self.create_csv('tie.csv', ['Name'], data) + result = f_674(file_path) + self.assertEqual(result, {'Name': 'Alice'}) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_675_simon_chien_edit.py b/data/clean/f_675_simon_chien_edit.py new file mode 100644 index 00000000..cb709a6b --- /dev/null +++ b/data/clean/f_675_simon_chien_edit.py @@ -0,0 +1,158 @@ +import pandas as pd +from 
sklearn.preprocessing import MinMaxScaler + + +def f_675(file_name: str) -> pd.DataFrame: + """Normalize data in a csv file using MinMaxScaler from sklearn. + Only numeric columns are normalized. Columns with other dtypes are left as + they are. + + Parameters: + file_name (str): The name of the csv file. + + Returns: + DataFrame: A pandas DataFrame with normalized data. + + Raises: + ValueError: If input does not have numeric columns. + + Requirements: + - pandas + - sklearn.preprocessing.MinMaxScaler + + Example: + >>> normalized_data = f_675("sample.csv") + >>> print(normalized_data.head()) + + Name Age Salary + 0 Alex Anderson 0.304651 0.122298 + 1 Mr. Leslie Casey 0.28140 0.598905 + 2 Anthony George 0.996744 0.216552 + 3 Brian Washington 0.126279 0.459948 + 4 Elias Lawrence 0.337239 0.124185 + + >>> normalized_data = f_675("test.csv") + >>> print(normalized_data.head()) + + Fruit Weight Amount + 0 Aplple 1 0.5 + 1 Mr. Leslie Casey 0.32140 0.998905 + 2 Anthony George 0.8998888 0.123784 + 3 Brian Washington 0.121222 0.445321 + 4 Elias Lawrence 0.345223 0 + + """ + df = pd.read_csv(file_name) + if df.select_dtypes(include='number').empty: + raise ValueError("Input must at least have one numeric column.") + + scaler = MinMaxScaler() + numeric_columns = df.select_dtypes(include='number').columns + df[numeric_columns] = scaler.fit_transform(df[numeric_columns]) + + return df + + +import unittest +import pandas as pd +import tempfile +import os +import shutil + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Set up a temporary directory + self.test_dir = tempfile.mkdtemp() + + def tearDown(self): + # Clean up by removing the directory + shutil.rmtree(self.test_dir) + + def create_csv(self, filename, data): + # Helper function to create a CSV file with the given data + full_path = os.path.join(self.test_dir, filename) + data.to_csv(full_path, index=False) + return full_path + + def test_non_numeric_and_empty(self): + # Test with non-numeric and empty data + non_numeric_df = pd.DataFrame({ + "Name": ["Alice", "Bob"], + "City": ["New York", "Los Angeles"] + }) + empty_df = pd.DataFrame() + + non_numeric_path = self.create_csv("non_numeric.csv", non_numeric_df) + empty_path = self.create_csv("empty.csv", empty_df) + + self.assertRaises(ValueError, f_675, non_numeric_path) + self.assertRaises(ValueError, f_675, empty_path) + + def test_single_row(self): + # Test with a single row of numeric data + single_row_df = pd.DataFrame({ + "Name": ["Olivia Anderson"], + "Age": [35], + "Salary": [58000] + }) + csv_path = self.create_csv("single_row.csv", single_row_df) + df = f_675(csv_path) + + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] == 0).all() and (df['Salary'] == 0).all()) + + def test_multiple_rows(self): + # Test multiple rows with numeric data + data_df = pd.DataFrame({ + "Name": ["Alice", "Bob", "Charlie"], + "Age": [25, 35, 45], + "Salary": [50000, 60000, 70000] + }) + csv_path = self.create_csv("multiple_rows.csv", data_df) + df = f_675(csv_path) + + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) + + def test_mixed_columns(self): + # Test with a mix of numeric and non-numeric columns + mixed_df = pd.DataFrame({ + "Name": ["Alice", "Bob", "Charlie"], + "Age": [25, 
35, 45], + "Salary": [50000, 60000, 70000], + "City": ["New York", "Chicago", "San Francisco"] + }) + csv_path = self.create_csv("mixed_columns.csv", mixed_df) + df = f_675(csv_path) + + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) + self.assertTrue('City' in df.columns and df['City'].equals(mixed_df['City'])) + + def test_large_dataset(self): + # Test with a large dataset to ensure scalability + large_df = pd.DataFrame({ + "Age": range(10000), # Large range of ages + "Salary": range(10000, 20000) # Large range of salaries + }) + csv_path = self.create_csv("large_dataset.csv", large_df) + df = f_675(csv_path) + + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_694_simon_chien_edit.py b/data/clean/f_694_simon_chien_edit.py new file mode 100644 index 00000000..82d275fd --- /dev/null +++ b/data/clean/f_694_simon_chien_edit.py @@ -0,0 +1,116 @@ +import pandas as pd +import numpy as np + + +def f_694(file_path, num_rows, data_dimensions=5, random_seed=None): + """ + Creates a CSV file on a given file path with random numeric data. + The number of rows in the CSV file is determined by the 'num_rows' parameter, + and the number of columns (features) is determined by the 'data_dimensions' parameter. + Columns are named following the convention: 'Feature_x', where x is the number of the + feature column starting at 1. + + Parameters: + file_path (str): The file path where the CSV file should be created. + num_rows (int): The number of rows of random data to generate. + data_dimensions (int, optional): The number of columns (features) in the CSV file. Defaults to 5. + random_seed (int, optional): Seed used in rng. Defaults to None. + + Returns: + str: The file path of the generated CSV file. 
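+ + Raises: + ValueError: If num_rows or data_dimensions is negative (raised by + numpy when generating the random array).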
+ + Requirements: + - pandas + - numpy + + Example: + >>> f_694('/tmp/data.csv', 100) + '/tmp/data.csv' + + >>> f_694('test.csv', 5, 2, random_seed=42) + 'test.csv' + >>> pd.read_csv('test.csv') + Feature_1 Feature_2 + 0 0.154163 0.740050 + 1 0.918747 0.900715 + 2 0.283828 0.606083 + 3 0.521226 0.552038 + 4 0.764560 0.020810 + + """ + np.random.seed(random_seed) + df = pd.DataFrame(np.random.rand(num_rows, data_dimensions), + columns=[f'Feature_{i + 1}' for i in range(data_dimensions)]) + + df.to_csv(file_path, index=False) + + return file_path + + +import unittest +import os +import pandas as pd +import shutil +import tempfile + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create a temporary directory for each test case + self.test_dir = tempfile.mkdtemp() + + def tearDown(self): + # Remove the temporary directory after each test + shutil.rmtree(self.test_dir) + + def test_basic_functionality(self): + # Test with default parameters + file_path = f_694(os.path.join(self.test_dir, 'data.csv'), 100) + self.assertTrue(os.path.exists(file_path)) + df = pd.read_csv(file_path) + self.assertEqual(len(df), 100) + self.assertEqual(len(df.columns), 5) + + def test_custom_dimensions(self): + # Test with custom dimensions + file_path = f_694(os.path.join(self.test_dir, 'data_custom.csv'), 50, 7) + self.assertTrue(os.path.exists(file_path)) + df = pd.read_csv(file_path) + self.assertEqual(len(df), 50) + self.assertEqual(len(df.columns), 7) + + def test_empty_file(self): + # Test generating an empty file + file_path = f_694(os.path.join(self.test_dir, 'empty.csv'), 0, 5) + self.assertTrue(os.path.exists(file_path)) + df = pd.read_csv(file_path) + self.assertEqual(len(df), 0) + + def test_random_seed(self): + # Test reproducibility with a random seed + file_path1 = f_694(os.path.join(self.test_dir, 'data_seed.csv'), 20, 5, 42) + file_path2 = f_694(os.path.join(self.test_dir, 'data_seed.csv'), 20, 5, 42) + df1 = pd.read_csv(file_path1) + df2 = pd.read_csv(file_path2) + pd.testing.assert_frame_equal(df1, df2) + + def test_no_columns(self): + # Test with zero columns + file_path = f_694(os.path.join(self.test_dir, 'no_columns.csv'), 10, 0) + self.assertTrue(os.path.exists(file_path)) + with open(file_path, 'r') as file: + data = file.read() + # Expect the file to contain only the headers or be empty + self.assertTrue(data == '' or all([x.strip() == '' for x in data.split(',')])) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_723_simon_chien_edit.py b/data/clean/f_723_simon_chien_edit.py new file mode 100644 index 00000000..12d176d7 --- /dev/null +++ b/data/clean/f_723_simon_chien_edit.py @@ -0,0 +1,137 @@ +import pandas as pd +import random + + +def f_723(csv_file, column_name='data', pattern=r'\d+[xX]', sample_size=None, seed=42): + r""" + Search for matches with a specified regex pattern in a given column of a CSV file and optionally return a random sample of these matches. + + The random sampling is implemented by generating a random list of integers which are used as indices. + The number of generated indices is given by sample_size. + + Parameters: + csv_file (str): Path to the CSV file. + column_name (str, optional): The name of the column to search. Defaults to 'data'. + pattern (str, optional): The regex pattern to search for. Defaults to r'\d+[xX]'. 
+ sample_size (int, optional): Number of random samples to return from the matches. If None, all matches are returned. Defaults to None. + seed (int, optional): Seed for the random number generator for reproducibility. Defaults to 42. + + Returns: + DataFrame: A pandas DataFrame containing either all the rows with matches or a random sample of them. + + Requirements: + - pandas + - random: for generating the random list of indices + + Example: + >>> result = f_723('sample.csv', column_name='data', pattern='\d+[xX]', sample_size=10, seed=42) + >>> print(result) + index data + 210 211 Fund several agency oil. Evening plant thank t... + 45 46 Language interest four take old. Education if ... + 525 526 Action million cultural stand. Heart explain a... + 465 466 Security face clearly every could. Image beaut... + 430 431 Popular produce floor part soldier human. Youn... + 260 261 Customer game focus respond that central. Nigh... + 195 196 The writer parent. Life social house west ten ... + 165 166 Main hotel production nothing.\r\nCoach voice ... + 810 811 Early right nature technology. Conference mind... + 60 61 Interest require gas wall. Different it see fi... + + >>> result = f_723('sample.csv', column_name='data', sample_size=2) + >>> print(result) + index data + 125 126 Fund elephenat, the dinoasuar eat this language t... + 21 22 Such an important story banking at the house a da... + + + + """ + df = pd.read_csv(csv_file) + matches = df[df[column_name].str.contains(pattern, na=False)] + + if sample_size is not None: + random.seed(seed) # Set the seed for reproducibility + sample_size = min(sample_size, len(matches)) # Ensure sample size is not greater than the number of matches + sampled_indices = random.sample(range(len(matches)), sample_size) # Randomly select indices + matches = matches.iloc[sampled_indices] # Select rows corresponding to sampled indices + + return matches + + +import unittest +import pandas as pd +import tempfile +import shutil +import os + + +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory to store the test CSV files + self.test_dir = tempfile.mkdtemp() + self.test_file = os.path.join(self.test_dir, "test_data.csv") + + # Create a sample DataFrame + data = { + "data": ["123x good", "no match here", "456X bad", "789x good", "ABC"], + "other_column": ["data1", "data2", "data3", "data4", "data5"] + } + self.df = pd.DataFrame(data) + self.df.to_csv(self.test_file, index=False) + + def tearDown(self): + # Remove temporary directory after the test + shutil.rmtree(self.test_dir) + + def test_default_parameters(self): + result = f_723(self.test_file) + expected_data = { + "data": ["123x good", "456X bad", "789x good"], + "other_column": ["data1", "data3", "data4"] + } + expected_df = pd.DataFrame(expected_data) + pd.testing.assert_frame_equal(result.reset_index(drop=True), expected_df) + + def test_custom_column(self): + with self.assertRaises(KeyError): + f_723(self.test_file, column_name="nonexistent_column") + + def test_custom_pattern(self): + result = f_723(self.test_file, pattern='\d+X') + expected_data = { + "data": ["456X bad"], + "other_column": ["data3"] + } + expected_df = pd.DataFrame(expected_data) + pd.testing.assert_frame_equal(result.reset_index(drop=True), expected_df) + + def test_sample_size(self): + result = f_723(self.test_file, sample_size=2, seed=42) + self.assertEqual(len(result), 2) + + def test_no_matches(self): + result = f_723(self.test_file, pattern="nope") + self.assertTrue(result.empty) + + def 
test_sample_size_larger_than_matches(self): + result = f_723(self.test_file, sample_size=10) + self.assertEqual(len(result), 3) # Only three matches exist + + def test_zero_sample_size(self): + result = f_723(self.test_file, sample_size=0) + self.assertTrue(result.empty) + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_724_simon_chien_edit.py b/data/clean/f_724_simon_chien_edit.py new file mode 100644 index 00000000..b99369c3 --- /dev/null +++ b/data/clean/f_724_simon_chien_edit.py @@ -0,0 +1,146 @@ +import sqlite3 +import pandas as pd +import os + + +def f_724(db_file, table_name, column_name, pattern=r'\d+[xX]'): + r""" + Find all entries in a string column of an SQL database table that match a regex pattern. + + The function loads an SQLite database and selects all entries from the specified + table. All entries of the specified column are matched against a regex pattern. + Matches are returned in a DataFrame. + + Parameters: + db_file (str): The SQLite database file. + table_name (str): The name of the table to search. + column_name (str): The name of the column to search. + pattern (str, optional): The regex pattern to search for. Defaults to r'\d+[xX]'. + + Returns: + DataFrame: A pandas DataFrame with the matches. + + Raises: + ValueError: If db_file does not exist. + + Requirements: + - sqlite3 + - pandas + - os + + Example: + >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column') + >>> print(result.head(10)) + id test_column + 0 1 4x4 car + 1 2 New 3x3 puzzle + 3 4 Product with 5X feature + 55 56 1xsafe + 56 57 3xmother + 57 58 5xenjoy + 58 59 2xhome + 59 60 3xanswer + 60 61 5xgirl + 61 62 5xkind + + >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column', pattern='kind') + >>> print(result) + id test_column + 20 21 To between successful ever ago PM toward today... + 42 43 Entire manage wife management perform size def... 
+ 61 62 5xkind + """ + + if not os.path.isfile(db_file): + raise ValueError('db_file does not exist.') + + conn = sqlite3.connect(db_file) + df = pd.read_sql_query(f"SELECT * FROM {table_name}", conn) + + if df[column_name].dtype == 'object': # Check if the column data type is a string + matches = df[df[column_name].str.contains(pattern)] + else: + matches = pd.DataFrame(columns=df.columns) # Return an empty DataFrame + + return matches + + +import unittest +import sqlite3 +import pandas as pd +import os +import tempfile + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create a temporary directory to hold the database + self.test_dir = tempfile.mkdtemp() + self.db_path = os.path.join(self.test_dir, "test.db") + + # Set up a new database and populate it with initial data + self.conn = sqlite3.connect(self.db_path) + self.conn.execute("CREATE TABLE test_table (id INTEGER PRIMARY KEY, test_column TEXT)") + data = [ + (1, "4x4 car"), + (2, "New 3x3 puzzle"), + (3, "Product with 5X feature"), + (4, "1xsafe"), + (5, "3xmother") + ] + self.conn.executemany("INSERT INTO test_table (id, test_column) VALUES (?, ?)", data) + self.conn.commit() + + def tearDown(self): + # Close the connection and remove the temporary directory + self.conn.close() + os.remove(self.db_path) + os.rmdir(self.test_dir) + + def test_regular_expression_match(self): + # Test case with known data and expected matches + result = f_724(self.db_path, 'test_table', 'test_column') + expected = pd.DataFrame({ + 'id': [1, 2, 3, 4, 5], + 'test_column': ['4x4 car', 'New 3x3 puzzle', 'Product with 5X feature', '1xsafe', '3xmother'] + }, index=[0, 1, 2, 3, 4]) + pd.testing.assert_frame_equal(result, expected) + + def test_no_matches(self): + # Test case where no entries match the pattern + result = f_724(self.db_path, 'test_table', 'test_column', pattern='abc') + self.assertTrue(result.empty) + + def test_non_existent_table(self): + # Catch the OperationalError from sqlite directly + with self.assertRaises(pd.errors.DatabaseError): + f_724(self.db_path, 'fake_table', 'test_column') + + def test_non_existent_column(self): + # Catch the correct exception for non-existent column + with self.assertRaises(KeyError): + f_724(self.db_path, 'test_table', 'fake_column') + + def test_different_pattern(self): + # Test case with a different pattern + self.conn.execute("INSERT INTO test_table (id, test_column) VALUES (?, ?)", (6, "something 1ab2x")) + self.conn.commit() + result = f_724(self.db_path, 'test_table', 'test_column', pattern='1ab2x') + result.reset_index(drop=True, inplace=True) # Resetting index before comparison + expected = pd.DataFrame({ + 'id': [6], + 'test_column': ['something 1ab2x'] + }, index=[0]) + pd.testing.assert_frame_equal(result, expected) + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_725_simon_chien_edit.py b/data/clean/f_725_simon_chien_edit.py new file mode 100644 index 00000000..aa38c947 --- /dev/null +++ b/data/clean/f_725_simon_chien_edit.py @@ -0,0 +1,122 @@ +import pandas as pd +from statsmodels.tsa.stattools import adfuller + + +def f_725(df: pd.DataFrame, column_a: str, column_b: str, column_c: str) -> bool: + """ + Determines if a specific subset of data is stationary. + + Functionality: + 1. Filters rows where column_b > 50 and column_c == 900. + 2. 
Checks if the resulting subset of data in column_a is stationary using the Augmented Dickey-Fuller test. + 3. Returns True if the data is stationary, False otherwise. + + Data is considered to be stationary if the p_value returned by the + Augmented Dickey-Fuller test is smaller than 0.05. + + If column_a is empty after filtering or if its values are constant, True + is returned. + + Parameters: + df (pd.DataFrame): A DataFrame containing the data. + column_a (str): The name of the column to test for stationarity. + column_b (str): The name of the column used for filtering based on its value being greater than 50. + column_c (str): The name of the column used for filtering based on its value being equal to 900. + + Returns: + bool: True if the data in column_a (after filtering based on column_b and column_c) is stationary, False otherwise. + + Requirements: + - pandas + - statsmodels: for using the adfuller test + + Example: + >>> df = pd.DataFrame({ + ... 'A': [1, 2, 3, 4, 5, 6], + ... 'B': [60, 70, 80, 90, 100, 110], + ... 'C': [900, 900, 900, 900, 900, 900] + ... }) + >>> f_725(df, 'A', 'B', 'C') + False + + >>> df = pd.DataFrame({ + ... 'TempA': [], + ... 'TempB': [], + ... 'TempC': [] + ... }) + >>> f_725(df, 'TempA', 'TempB', 'TempC') + True + """ + # Filter rows based on column_b and column_c + filtered_df = df[(df[column_b] > 50) & (df[column_c] == 900)] + + # An empty subset (nunique == 0) or a constant subset (nunique == 1) + # is treated as stationary by definition + if filtered_df[column_a].nunique() <= 1: + return True + + # Perform Augmented Dickey-Fuller test + adf_result = adfuller(filtered_df[column_a]) + p_value = adf_result[1] + return p_value < 0.05 + + +import unittest +import os +import pandas as pd + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create DataFrame in setUp for test isolation + self.data = pd.DataFrame({ + 'A': list(range(100)), + 'B': [x * 2 for x in range(100)], + 'C': [900 if x % 2 == 0 else 800 for x in range(100)] + }) + + def test_constant_value(self): + # All values in column A are constant after filtering + self.data['A'] = 5 + result = f_725(self.data, 'A', 'B', 'C') + self.assertTrue(result, "Should be True as data is constant.") + + def test_empty_after_filter(self): + # After filtering, no rows remain + result = f_725(self.data[self.data['B'] > 1000], 'A', 'B', 'C') + self.assertTrue(result, "Should be True as no data remains after filter.") + + def test_non_stationary_data(self): + # Test a clearly non-stationary dataset + result = f_725(self.data, 'A', 'B', 'C') + self.assertFalse(result, "Should be False as data is non-stationary.") + + def test_stationary_data(self): + # Test a stationary dataset + self.data['A'] = 5 + result = f_725(self.data, 'A', 'B', 'C') + self.assertTrue(result, "Should be True as data is stationary.") + + def test_edge_case_small_dataset(self): + # Test a very small dataset + small_data = pd.DataFrame({ + 'A': [1, 1], + 'B': [60, 70], + 'C': [900, 900] + }) + result = f_725(small_data, 'A', 'B', 'C') + self.assertTrue(result, "Should be True due to small dataset size or no variation.") + + +if __name__ == '__main__': + run_tests() diff --git a/data/clean/f_728_simon_chien_edit.py b/data/clean/f_728_simon_chien_edit.py new file mode 100644 index 00000000..e90e9ffb --- /dev/null +++ b/data/clean/f_728_simon_chien_edit.py @@ -0,0 +1,176 @@ +import pandas as pd +from 
collections import Counter + + +def f_728(data): + """ + Analyze a dictionary of student data to return a dataframe sorted by name and age in ascending order, + the average score per student as a pandas Series, and the most common age as an integer. + + Parameters: + data (dict): A dictionary containing student data with three keys: + - 'Name': List of student names. + - 'Age': List of student ages. + - 'Score': List of student scores. + + Returns: + pd.DataFrame, pd.Series, int or None: + - A dataframe sorted by 'Name' and 'Age' in ascending order. + - A series representing average scores indexed by student names. + - An integer representing the most common age or None if no data is available. + + Raises: + ValueError: If the dictionary does not have the required keys. + + Requirements: + - pandas + - collections + + Example: + >>> data = { + ... 'Name': ['Tom', 'Nick', 'John', 'Tom', 'John', 'John', 'Nick', 'Tom', 'John', 'Tom'], + ... 'Age': [20, 21, 19, 20, 19, 19, 21, 20, 19, 20], + ... 'Score': [85, 79, 92, 88, 90, 92, 81, 86, 90, 85] + ... } + >>> df, avg_scores, common_age = f_728(data) + >>> print(df) + Name Age Score + 2 John 19 92 + 4 John 19 90 + 5 John 19 92 + 8 John 19 90 + 1 Nick 21 79 + 6 Nick 21 81 + 0 Tom 20 85 + 3 Tom 20 88 + 7 Tom 20 86 + 9 Tom 20 85 + + >>> print(avg_scores) + Name + John 91.0 + Nick 80.0 + Tom 86.0 + Name: Score, dtype: float64 + >>> print(common_age) + 19 + + >>> data = { + ... 'Name': ['Simon', 'Alex', 'Tanja', 'Amanda', 'Tanja'], + ... 'Age': [21, 42, 54, 20, 54], + ... 'Score': [1, 1, 2, 3, 5] + ... } + >>> df, avg_scores, common_age = f_728(data) + >>> print(df) + Name Age Score + 1 Alex 42 1 + 3 Amanda 20 3 + 0 Simon 21 1 + 2 Tanja 54 2 + 4 Tanja 54 5 + >>> print(avg_scores) + Name + Alex 1.0 + Amanda 3.0 + Simon 1.0 + Tanja 3.5 + Name: Score, dtype: float64 + >>> print(common_age) + 54 + """ + + if not all(key in data for key in ['Name', 'Age', 'Score']): + raise ValueError("The dictionary must have the keys 'Name', 'Age', 'Score'") + + # Creating a dataframe and sorting it + df = pd.DataFrame(data).sort_values(['Name', 'Age']) + + # Calculating average scores + avg_scores = df.groupby('Name')['Score'].mean() + + # Getting the most common age + age_counts = Counter(df['Age']) + most_common_age = age_counts.most_common(1)[0][0] if age_counts else None + + return df, avg_scores, most_common_age + + +import unittest +import pandas as pd +import os + + +class TestCases(unittest.TestCase): + + def test_wrong_keys(self): + # Testing with incorrect dictionary keys + data = { + 'Names': ['Tom', 'Nick'], + 'Ages': [20, 21], + 'Scores': [85, 79] + } + with self.assertRaises(ValueError): + f_728(data) + + def test_correct_processing(self): + # Testing with correctly formatted data + data = { + 'Name': ['Tom', 'Nick', 'Tom', 'John'], + 'Age': [20, 21, 20, 19], + 'Score': [85, 79, 88, 92] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(df.iloc[0]['Name'], 'John') + self.assertAlmostEqual(avg_scores['Tom'], 86.5) + self.assertEqual(common_age, 20) + + def test_empty_data(self): + # Testing with empty lists + data = {'Name': [], 'Age': [], 'Score': []} + df, avg_scores, common_age = f_728(data) + self.assertTrue(df.empty) + self.assertTrue(avg_scores.empty) + self.assertIsNone(common_age) + + def test_all_same_age(self): + # Testing with all students having the same age + data = { + 'Name': ['Alice', 'Bob', 'Cindy'], + 'Age': [25, 25, 25], + 'Score': [88, 92, 85] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(common_age, 
25) + + def test_no_common_age(self): + # Testing with no common age, each student has a unique age + data = { + 'Name': ['Alice', 'Bob', 'Cindy'], + 'Age': [24, 25, 26], + 'Score': [88, 92, 85] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(common_age, 24) # Assuming the first element is taken if all are equally common + + def test_duplicate_names_different_ages(self): + # Testing with duplicate names but different ages + data = { + 'Name': ['Tom', 'Tom', 'Nick'], + 'Age': [20, 21, 21], + 'Score': [85, 88, 79] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(len(df[df['Name'] == 'Tom']), 2) + self.assertNotEqual(df.iloc[0]['Age'], df.iloc[1]['Age']) + self.assertTrue(df[df['Name'] == 'Tom'].Age.isin([20, 21]).all()) + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_730_simon_chien_edit.py b/data/clean/f_730_simon_chien_edit.py new file mode 100644 index 00000000..57c2ed19 --- /dev/null +++ b/data/clean/f_730_simon_chien_edit.py @@ -0,0 +1,128 @@ +import pandas as pd +import os + + +def f_730(data_dir: str, csv_files: list) -> pd.DataFrame: + """ + Merge / Concatenate multiple CSV files from a specified directory into a single Pandas DataFrame. + + If an empty list of files is passed, an empty DataFrame is returned. + + Parameters: + data_dir (str): The directory path where the CSV files are located. + csv_files (list): A list of CSV file names to be merged. + + Returns: + pd.DataFrame: A pandas DataFrame with the merged data. + + Requirements: + - pandas + - os + + Example: + >>> df = f_730('/path/to/data/directory', ['file1.csv', 'file2.csv', 'file3.csv']) + >>> print(df.head()) + Name Age Gender + 0 Simon 5 Male + 1 Bobby 32 Male + 0 Elena 13 Female + 1 Tom 23 Male + 0 Franko 12 Male + + >>> df = f_730('/path/to/data/directory', ['file1.csv', 'other_file.csv']) + >>> print(df.head()) + Name Age Gender Animal Size + 0 Simon 5 Male None None + 1 Bobby 32 Male None None + 0 Elena 13 Female None None + 2 None None None Tiger 12 + """ + merged_df = pd.DataFrame() + + for file in csv_files: + file_path = os.path.join(data_dir, file) + df = pd.read_csv(file_path) + merged_df = pd.concat([merged_df, df], ignore_index=True) + + return merged_df + + +import unittest +import pandas as pd +import os +import shutil +import tempfile + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create a temporary directory to hold CSV files + self.test_dir = tempfile.mkdtemp() + self.files = { + 'file1.csv': pd.DataFrame({ + 'Name': ['Alice', 'Bob'], + 'Age': [25, 30] + }), + 'file2.csv': pd.DataFrame({ + 'Name': ['Charlie'], + 'Age': [35] + }), + 'file3.csv': pd.DataFrame({ + 'Name': ['David', 'Eve'], + 'Age': [45, 55], + 'Gender': ['Male', 'Female'] + }), + 'file4.csv': pd.DataFrame({ + 'Name': ['Faythe'], + 'Animal': ['Cat'] + }) + } + # Write files to disk + for filename, df in self.files.items(): + df.to_csv(os.path.join(self.test_dir, filename), index=False) + + def tearDown(self): + # Clean up the temporary directory + shutil.rmtree(self.test_dir) + + def test_with_multiple_files(self): + # Test merging multiple files + result = f_730(self.test_dir, ['file1.csv', 'file2.csv']) + expected_df = 
pd.concat([self.files['file1.csv'], self.files['file2.csv']], + ignore_index=True) + pd.testing.assert_frame_equal(result, expected_df) + + def test_with_different_columns(self): + # Test files with different columns + result = f_730(self.test_dir, ['file1.csv', 'file3.csv', 'file4.csv']) + expected_df = pd.concat([self.files['file1.csv'], self.files['file3.csv'], self.files['file4.csv']], + ignore_index=True) + pd.testing.assert_frame_equal(result, expected_df) + + def test_with_empty_list(self): + # Test with an empty list of files + result = f_730(self.test_dir, []) + self.assertTrue(result.empty) + + def test_with_nonexistent_file(self): + # Test referencing a non-existent file + with self.assertRaises(FileNotFoundError): + f_730(self.test_dir, ['nonexistent.csv']) + + def test_single_file(self): + # Test with a single file + result = f_730(self.test_dir, ['file2.csv']) + expected_df = self.files['file2.csv'] + pd.testing.assert_frame_equal(result, expected_df) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_731_simon_chien_edit.py b/data/clean/f_731_simon_chien_edit.py new file mode 100644 index 00000000..371f48c9 --- /dev/null +++ b/data/clean/f_731_simon_chien_edit.py @@ -0,0 +1,151 @@ +import os +import pandas as pd +import numpy as np + + +def f_731(data_dir: str, csv_file: str) -> pd.DataFrame: + """ + Load a CSV file into a pandas DataFrame and replace the NaN values in + numeric columns with the mean of the corresponding column. + The resulting DataFrame is returned. + + If an empty csv is passed, an empty DataFrame is returned. + + Parameters: + - data_dir (str): The path to the directory containing the CSV file. + - csv_file (str): The name of the CSV file to be processed. + + Returns: + pd.DataFrame: A pandas DataFrame with the processed data. + + Raises: + FileNotFoundError: If csv_file does not exist. 
+ + Requirements: + - os + - pandas + - numpy + + Example: + >>> df = f_731("/path/to/data/directory", "file.csv") + >>> print(df) + Fruit Taste Cost + 0 Apple Good 1 + 1 Orange NaN 2 + 2 Avocado Bad 1.667 + 3 Coconut Tasty 2 + + >>> df = f_731("/path/to/data/directory", "test.csv") + >>> print(df) + Name Score + 0 Alex 25.2 + 1 Tanja 31.5 + 2 Maine 99 + 3 Lisa 100 + 4 Simone 63.925 + """ + file_path = os.path.join(data_dir, csv_file) + try: + df = pd.read_csv(file_path) + except pd.errors.EmptyDataError: + return pd.DataFrame() + + for column in df.columns: + if np.issubdtype(df[column].dtype, np.number): # checking for numeric columns + df[column] = df[column].fillna(df[column].mean()) + + return df + + +import unittest +import pandas as pd +import numpy as np +import os +import tempfile +import shutil + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create a temporary directory for test data + self.test_dir = tempfile.mkdtemp() + + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + + def create_csv(self, filename, data): + # Helper method to create a CSV file + filepath = os.path.join(self.test_dir, filename) + data.to_csv(filepath, index=False) + return filename + + def test_empty_csv(self): + # Test with an empty CSV file + filename = self.create_csv('empty.csv', pd.DataFrame()) + result = f_731(self.test_dir, filename) + self.assertTrue(result.empty) + + def test_numeric_columns_nan_replacement(self): + data = pd.DataFrame({ + 'Age': [25, np.nan, 30], + 'Salary': [50000, 60000, np.nan] + }) + filename = self.create_csv('data.csv', data) + expected = pd.DataFrame({ + 'Age': [25.0, 27.5, 30.0], # Ensure all ages are floats + 'Salary': [50000.0, 60000.0, 55000.0] # Ensure all salaries are floats + }) + result = f_731(self.test_dir, filename) + pd.testing.assert_frame_equal(result, expected) + + def test_mixed_columns(self): + data = pd.DataFrame({ + 'Name': ['Alice', 'Bob', 'Charlie'], + 'Score': [np.nan, 88, 92] + }) + filename = self.create_csv('mixed.csv', data) + expected = pd.DataFrame({ + 'Name': ['Alice', 'Bob', 'Charlie'], + 'Score': [90.0, 88.0, 92.0] # Ensure all scores are floats + }) + result = f_731(self.test_dir, filename) + pd.testing.assert_frame_equal(result, expected) + + def test_all_nan_column(self): + # Test with a column that is entirely NaN + data = pd.DataFrame({ + 'Empty': [np.nan, np.nan, np.nan] + }) + filename = self.create_csv('all_nan.csv', data) + result = f_731(self.test_dir, filename) + self.assertTrue(result['Empty'].isnull().all()) + + def test_no_numeric_data(self): + # Test a CSV file with no numeric data + data = pd.DataFrame({ + 'City': ['New York', 'Los Angeles', 'Chicago'] + }) + filename = self.create_csv('cities.csv', data) + result = f_731(self.test_dir, filename) + pd.testing.assert_frame_equal(result, data) + + def test_file_not_found(self): + # Test the FileNotFoundError + with self.assertRaises(FileNotFoundError): + f_731(self.test_dir, "non_existent.csv") + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_732_simon_chien_edit.py b/data/clean/f_732_simon_chien_edit.py new file mode 100644 index 00000000..71a5bb53 --- /dev/null +++ b/data/clean/f_732_simon_chien_edit.py @@ -0,0 +1,143 @@ +import os +import random +import pandas as pd + 
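+# f_732 below draws on two sources of randomness: the random module picks the +# file and the number of rows to draw, while pandas' random_state draws the +# rows themselves, so a fixed seed makes the whole selection reproducible. + 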
+def f_732(data_dir, + csv_files=['file1.csv', 'file2.csv', 'file3.csv'], + seed=None): + """ + Randomly select one of the provided csv_files and select a certain number + of records from the file at random. + The selected records are returned in a DataFrame. + The name of the selected csv_file is also returned. + + If the csv_file is empty return an empty DataFrame. + + Parameters: + data_dir (str): The directory where the CSV files are located. + csv_files (list of str): The list of CSV files to choose from. Default is ['file1.csv', 'file2.csv', 'file3.csv']. + seed (int, optional): Seed for random number generation and for sampling from the csv. + + Returns: + tuple: A tuple containing two elements: + - str: The name of the randomly selected file. + - DataFrame: A pandas DataFrame with the selected rows. + + Requirements: + - os + - random + - pandas + + Example: + >>> file_name, df = f_732('test_data') + >>> print(file_name) + 'file2.csv' + >>> print(df) + Animal Weight + 0 Cat 1 + 21 Mouse 12 + 15 Elephant 1000 + 2 Tiger 500 + + >>> file_name, df = f_732('data', csv_files=['test1.csv', 'test2.csv'], seed=42) + >>> print(file_name) + 'test1.csv' + >>> print(df) + Name House Salary + 12 Simba mansion 11111 + 231 Dolores mansion 2222 + 135 Elaine shed 93274 + 21 Sophia garden 111 + """ + + random.seed(seed) + + file = csv_files[random.randint(0, len(csv_files) - 1)] + file_path = os.path.join(data_dir, file) + + try: + df = pd.read_csv(file_path) + except pd.errors.EmptyDataError: + return file, pd.DataFrame() + + selected_rows = df.sample(n=random.randint(1, len(df)), random_state=seed) + + return file, selected_rows + + +import unittest +import pandas as pd +import os +import tempfile +import shutil + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory + self.test_dir = tempfile.mkdtemp() + self.test_files = [ + 'file1.csv', 'file2.csv', 'file3.csv', 'file4.csv', 'file5.csv', 'empty.csv' + ] + # Sample data for CSV files + data = { + 'file1.csv': pd.DataFrame({'Name': ['Alice', 'Bob'], 'Age': [25, 30]}), + 'file2.csv': pd.DataFrame({'Name': ['Chris', 'Dana'], 'Age': [35, 40]}), + 'file3.csv': pd.DataFrame({'Name': ['Eve', 'Frank'], 'Age': [45, 50]}), + 'file4.csv': pd.DataFrame({'Name': ['Grace', 'Hank'], 'Age': [55, 60]}), + 'file5.csv': pd.DataFrame({'Name': ['Ivan', 'Julia'], 'Age': [65, 70]}), + 'empty.csv': pd.DataFrame() + } + # Create CSV files in the directory + for file_name, df in data.items(): + df.to_csv(os.path.join(self.test_dir, file_name), index=False) + + def tearDown(self): + # Remove the directory after the test + shutil.rmtree(self.test_dir) + + def test_random_selection(self): + # Testing random selection and ensuring the file chosen and its data are correct + file_name, df = f_732(self.test_dir, seed=42) + self.assertTrue(file_name in self.test_files) + self.assertFalse(df.empty) + + def test_specific_file_selection(self): + # Test selecting a specific file and checking contents + file_name, df = f_732(self.test_dir, ['file1.csv'], seed=42) + expected = pd.read_csv(os.path.join(self.test_dir, 'file1.csv')) + # Sample from expected and reset index + expected_sampled = expected.sample(len(df), random_state=42).reset_index(drop=True) + # Reset index of df to ensure indices match + df_reset = df.reset_index(drop=True) + # Assert frame equality + 
pd.testing.assert_frame_equal(df_reset, expected_sampled) + + def test_empty_file(self): + # Ensure an empty file returns an empty DataFrame + file_name, df = f_732(self.test_dir, ['empty.csv'], seed=42) + self.assertEqual(file_name, 'empty.csv') + self.assertTrue(df.empty) + + def test_multiple_files(self): + # Testing selection from multiple files + file_name, df = f_732(self.test_dir, ['file3.csv', 'file4.csv'], seed=24) + self.assertIn(file_name, ['file3.csv', 'file4.csv']) + self.assertFalse(df.empty) + + def test_no_file_matches(self): + # Testing behavior when no files match the list + with self.assertRaises(FileNotFoundError): + f_732(self.test_dir, ['nonexistent.csv'], seed=42) + + +if __name__ == "__main__": + run_tests() diff --git a/data/clean/f_733_simon_chien_edit.py b/data/clean/f_733_simon_chien_edit.py new file mode 100644 index 00000000..6a0f3406 --- /dev/null +++ b/data/clean/f_733_simon_chien_edit.py @@ -0,0 +1,126 @@ +import pandas as pd +from sklearn.linear_model import LinearRegression +from sklearn.model_selection import train_test_split + + +def f_733(csv_file_path, attribute, test_size=0.2, random_state=42): + """ + Train a linear regression model on a dataset and predict the value of a particular attribute. + This function reads a CSV file to create a pandas DataFrame, separates the data into + training and testing sets, and performs linear regression. It returns the predicted + values for the testing set as well as the trained model. + + Parameters: + csv_file_path (str): The path to the CSV file containing the data set. + attribute (str): The attribute to predict. + test_size (float, optional): Proportion of the dataset to include in the test split. Default is 0.2. + random_state (int, optional): Seed used by the random number generator. Default is 42. + + Returns: + tuple: A tuple containing: + - model (LinearRegression): The trained linear regression model. + - predictions (ndarray): An array of predicted values for the test set. + + Requirements: + - pandas + - sklearn.linear_model + - sklearn.model_selection + + Example: + >>> model, predictions = f_733("/path/to/data.csv", "target") + >>> print(predictions) + [123.45, ..., 126.78] + + >>> model, predictions = f_733("/path/to/test.csv", "target") + >>> print(predictions) + [1.2423, 4.2313, 28.2219, 10.3092] + + Note: The function assumes that the CSV file is correctly formatted and that the specified attribute exists. 
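+
+    For n samples, train_test_split places ceil(n * test_size) rows in the
+    test set, so the returned predictions array has that many entries.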
+ """ + df = pd.read_csv(csv_file_path) + X = df.drop(columns=[attribute]) + y = df[attribute] + + X_train, X_test, y_train, y_test = train_test_split( + X, y, test_size=test_size, random_state=random_state + ) + + model = LinearRegression() + model.fit(X_train, y_train) + + predictions = model.predict(X_test) + return model, predictions + + +import unittest +import numpy as np +import pandas as pd +import tempfile +import os +from sklearn.linear_model import LinearRegression + + +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary CSV file to simulate test environments + self.temp_file = tempfile.NamedTemporaryFile(mode='w+', delete=False, suffix='.csv') + self.csv_file_path = self.temp_file.name + self.temp_file.close() # Close the file immediately after creation + + def tearDown(self): + # Remove the temporary file after the test + os.unlink(self.csv_file_path) + + def create_csv(self, data, header=True): + # Utility to create CSV content + df = pd.DataFrame(data) + df.to_csv(self.csv_file_path, index=False, header=header) + + def test_valid_data(self): + # Valid CSV and attribute + data = {'feature1': [1, 2, 3], 'feature2': [4, 5, 6], 'target': [7, 8, 9]} + self.create_csv(data) + model, predictions = f_733(self.csv_file_path, "target") + self.assertIsInstance(model, LinearRegression) + self.assertIsInstance(predictions, np.ndarray) + self.assertEqual(len(predictions), 1) # 20% of 3 is 0.6, rounds to 1 + + def test_different_test_size(self): + # Changing the test size + data = {'feature1': range(10), 'feature2': range(10, 20), 'target': range(20, 30)} + self.create_csv(data) + model, predictions = f_733(self.csv_file_path, "target", test_size=0.3) + self.assertEqual(len(predictions), 3) # 30% of 10 is 3 + + def test_invalid_attribute(self): + # Attribute not present in the CSV + data = {'feature1': [1, 2], 'feature2': [3, 4]} + self.create_csv(data) + with self.assertRaises(KeyError): + f_733(self.csv_file_path, "nonexistent_target") + + def test_csv_with_missing_values(self): + # CSV containing missing values in features + data = {'feature1': [1, np.nan, 3], 'feature2': [4, 5, 6], 'target': [7, 8, 9]} + self.create_csv(data) + with self.assertRaises(ValueError): + f_733(self.csv_file_path, "target") + + def test_predicting_non_numerical_data(self): + # Non-numerical data in target + data = {'feature1': [1, 2, 3], 'feature2': [4, 5, 6], 'target': ['a', 'b', 'c']} + self.create_csv(data) + with self.assertRaises(ValueError): + f_733(self.csv_file_path, "target") + + +def run_tests(): + # Function to execute the test cases + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +if __name__ == "__main__": + run_tests() From 72971d40e3cd07cc0a600f7796b57646d7658591 Mon Sep 17 00:00:00 2001 From: vumichien Date: Thu, 25 Apr 2024 11:14:53 +0900 Subject: [PATCH 2/3] run eval script --- CONTRIBUTING.md | 4 +- .../f_657_simon_chien_edit_wo_doc.py | 120 +++++++++++++ .../f_661_simon_chien_edit_wo_doc.py | 107 +++++++++++ .../processed/f_663_simon_chien_edit_w_doc.py | 168 ++++++++++++++++++ .../f_674_simon_chien_edit_wo_doc.py | 110 ++++++++++++ .../f_675_simon_chien_edit_wo_doc.py | 130 ++++++++++++++ .../f_694_simon_chien_edit_wo_doc.py | 95 ++++++++++ .../f_723_simon_chien_edit_wo_doc.py | 114 ++++++++++++ .../f_724_simon_chien_edit_wo_doc.py | 124 +++++++++++++ .../processed/f_725_simon_chien_edit_w_doc.py | 102 +++++++++++ .../processed/f_728_simon_chien_edit_w_doc.py | 156 
++++++++++++++++ .../f_730_simon_chien_edit_wo_doc.py | 107 +++++++++++ .../f_731_simon_chien_edit_wo_doc.py | 127 +++++++++++++ .../f_732_simon_chien_edit_wo_doc.py | 123 +++++++++++++ .../f_733_simon_chien_edit_wo_doc.py | 104 +++++++++++ 15 files changed, 1689 insertions(+), 2 deletions(-) create mode 100644 data/processed/f_657_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_661_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_663_simon_chien_edit_w_doc.py create mode 100644 data/processed/f_674_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_675_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_694_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_723_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_724_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_725_simon_chien_edit_w_doc.py create mode 100644 data/processed/f_728_simon_chien_edit_w_doc.py create mode 100644 data/processed/f_730_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_731_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_732_simon_chien_edit_wo_doc.py create mode 100644 data/processed/f_733_simon_chien_edit_wo_doc.py diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1bdfeddf..15ad465d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -219,11 +219,11 @@ pip install -r requirements.txt If you notice any third-party libraries that are not included in the `requirements.txt` file but used in the `data/process.py` file, please add them with the compatible versions to the `requirements.txt` file. ### How to Validate Data? -We build a GitHub action to validate the data. The action is based on the `script/bash.sh`. Specifically, any refined data will be copied to the `data/clean` folder and then parsed based on `script/parser.py`. The parsed data will be stored in the `data/processed` folder. The parsed data will be separate into two splits for `pytest`. The first split will be validated by running `pytest $FILE_NAME` and the second split will be validated by running `pytest --doctest-modules $FILE_NAME`. Please note that we validate each file separately, as `pytest` may fail unexpectedly when validating all files at once. +We build a GitHub action to validate the data. The action is based on the `script/run.sh`. Specifically, any refined data will be copied to the `data/clean` folder and then parsed based on `script/parser.py`. The parsed data will be stored in the `data/processed` folder. The parsed data will be separate into two splits for `pytest`. The first split will be validated by running `pytest $FILE_NAME` and the second split will be validated by running `pytest --doctest-modules $FILE_NAME`. Please note that we validate each file separately, as `pytest` may fail unexpectedly when validating all files at once. If you want to validate the data locally, you can run the following command: ```bash -sh script/bash.sh +sh script/run.sh ``` If you find any failed test cases, please fix the data in the `data/raw` folder based on the failed problem IDs. The refinement should be based on the [How to Refine Data?](#how-to-refine-data) section. 
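+For example, to validate one of the processed files added in this patch in both modes:
+```bash
+pytest data/processed/f_657_simon_chien_edit_wo_doc.py
+pytest --doctest-modules data/processed/f_657_simon_chien_edit_wo_doc.py
+```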
\ No newline at end of file diff --git a/data/processed/f_657_simon_chien_edit_wo_doc.py b/data/processed/f_657_simon_chien_edit_wo_doc.py new file mode 100644 index 00000000..b476c896 --- /dev/null +++ b/data/processed/f_657_simon_chien_edit_wo_doc.py @@ -0,0 +1,120 @@ +import re +import os +import glob + + +def f_657(dir_path): + """ + Search for occurrences of the word "error" in all text files within a + specified directory and its subdirectories. + + Parameters: + dir_path (str): The path of the directory. + + Returns: + dict: A dictionary with relative file paths as keys and the count of + occurrences of the word "error" as values. + + Raises: + - ValueError: If directory in dir_path does not exist. + + Requirements: + - re: For regex pattern matching. + - os: For retrieving relative file paths. + - glob: For fetching all text file paths in the directory. + + The function specifically searches for the word "error" in text files + (with the extension ".txt"). + This function is NOT case sensitive, e.g. also "ERROr" will be counted. + + Example: + >>> f_657("/path/to/directory") + {'file1.txt': 2, 'subdir/file2.txt': 1} + + >>> f_657("/path/to/directory") + {'test.txt': 245, 'subdir/test2.txt': 0, 'subdir/sf/test3.txt': 1} + """ + + if not os.path.isdir(dir_path): + raise ValueError("Specified directory does not exist.") + + result = {} + file_paths = glob.glob(f'{dir_path}/**/*.txt', recursive=True) + for file_path in file_paths: + with open(file_path, 'r') as file: + content = file.read() + matches = re.findall(r'\berror\b', content, re.IGNORECASE) + # Always set the file's count in the result dictionary, even if it's 0 + result[os.path.relpath(file_path, dir_path)] = len(matches) + + return result + +import unittest +import os +import shutil +import tempfile +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory to simulate test environments + self.test_dir = tempfile.mkdtemp() + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + def create_file(self, sub_path, content=""): + # Helper method to create a file with given content + full_path = os.path.join(self.test_dir, sub_path) + os.makedirs(os.path.dirname(full_path), exist_ok=True) + with open(full_path, 'w') as file: + file.write(content) + # Return normalized path for cross-platform compatibility + return os.path.normpath(sub_path) + def test_non_existent(self): + # Expect ValueError for non-existent directory + with self.assertRaises(ValueError): + f_657(os.path.join(self.test_dir, "non_existent")) + def test_empty_folder(self): + # Test empty directory + result = f_657(self.test_dir) + self.assertEqual(result, {}) + def test_files_with_errors(self): + # Files with varying counts of 'error' + files = { + "1.txt": "error\nERROR\nErrOr", + "subfolder1/2.txt": "", + "subfolder2/3.txt": "error\nerror error" + } + expected = { + os.path.normpath("1.txt"): 3, + os.path.normpath("subfolder1/2.txt"): 0, + os.path.normpath("subfolder2/3.txt"): 3 + } + for path, content in files.items(): + self.create_file(path, content) + result = f_657(self.test_dir) + self.assertEqual(result, expected) + def test_case_sensitive_and_realistic_text(self): + # More complex scenarios, including nested directories + file_path = self.create_file('nested/folder1/folder2/error_log.txt', 'Error\nerror\nERROR') + expected = {file_path: 3} + result = f_657(self.test_dir) + self.assertEqual(result, expected) + def test_exact_word_matching(self): + # Ensure only the exact word 
'error' is counted and ignore similar words like 'errors' + files = { + "file1.txt": "error error error", # Should count 3 times + "subdir/file2.txt": "errors error erro errors", # Should count 1 time + "subdir2/nested/file3.txt": "an error occurred", # Should count 1 time + "subdir3/file4.txt": "no errors here", # Should count 0 times + "subdir3/file5.txt": "Error and ERROR and error" # Should count 3 times, case insensitive + } + expected = { + os.path.normpath("file1.txt"): 3, + os.path.normpath("subdir/file2.txt"): 1, + os.path.normpath("subdir2/nested/file3.txt"): 1, + os.path.normpath("subdir3/file4.txt"): 0, + os.path.normpath("subdir3/file5.txt"): 3 + } + for path, content in files.items(): + self.create_file(path, content) + result = f_657(self.test_dir) + self.assertEqual(result, expected) diff --git a/data/processed/f_661_simon_chien_edit_wo_doc.py b/data/processed/f_661_simon_chien_edit_wo_doc.py new file mode 100644 index 00000000..73b22c14 --- /dev/null +++ b/data/processed/f_661_simon_chien_edit_wo_doc.py @@ -0,0 +1,107 @@ +import pandas as pd +from sklearn.linear_model import LinearRegression + + +def f_661(file_path, output_path=None, sort_key='title', linear_regression=False, x_column=None, y_column=None): + """ + Sorts a CSV file by a specific column key using pandas, and optionally writes the sorted data to another CSV file. + Can also fit a linear regression model to specified columns if required. + + Parameters: + file_path (str): The path to the input CSV file. This parameter is required. + output_path (str): The path where the sorted CSV will be saved. If not provided, the function won't save the sorted dataframe. + sort_key (str): The column name used as a key to sort the CSV file. Defaults to 'title'. + linear_regression (bool): If True, fits a linear regression model to the specified columns. Defaults to False. + x_column (str): The name of the column to use as the predictor variable for linear regression. + y_column (str): The name of the column to use as the response variable for linear regression. + + Returns: DataFrame, str, or LinearRegression model: The sorted pandas DataFrame if 'output_path' is None and + 'linear_regression' is False, otherwise the path to the saved output file. If 'linear_regression' is True, + returns the fitted model. + + Requirements: + - pandas + - scikit-learn + + Example: + >>> model = f_661('data.csv', sort_key='title', linear_regression=True, x_column='age', y_column='salary') + >>> # Returns a fitted LinearRegression model based on 'age' and 'salary' columns. + + Raises: + Exception: If there is an error in reading, sorting the data, or fitting the model. 
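+
+    Note: if linear_regression is True, the fitted model is returned and the
+    sorted data is not written to output_path, even when one is provided.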
+ """ + try: + df = pd.read_csv(file_path) + df.sort_values(by=[sort_key], inplace=True) + + if linear_regression: + if x_column not in df.columns or y_column not in df.columns: + raise ValueError("Specified columns for linear regression do not exist in the dataframe") + + X = df[[x_column]] + y = df[y_column] + model = LinearRegression().fit(X, y) + return model + + if output_path: + df.to_csv(output_path, index=False) + return output_path + else: + return df + except Exception as e: + raise Exception(f"Error while processing the file: {str(e)}") + +import unittest +import pandas as pd +import numpy as np +import os +import shutil +import tempfile +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory for test files + self.test_dir = tempfile.mkdtemp() + self.test_csv_path = os.path.join(self.test_dir, 'test_data.csv') + # Create a sample CSV file + df = pd.DataFrame({ + 'title': ['Book C', 'Book A', 'Book B'], + 'x': [1, 2, 3], + 'y': [5, 7, 9] + }) + df.to_csv(self.test_csv_path, index=False) + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + def test_valid_input_no_output_path(self): + # Test with valid input, no output file specified (should return DataFrame) + df = f_661(self.test_csv_path, sort_key='title') + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue(df['title'].is_monotonic_increasing) + def test_invalid_file_path(self): + # Test with invalid file path (should raise an exception) + with self.assertRaises(Exception): + f_661(os.path.join(self.test_dir, 'non_existent.csv')) + def test_invalid_sort_key(self): + # Test with invalid sort key (should raise an exception) + with self.assertRaises(Exception): + f_661(self.test_csv_path, sort_key='non_existent_column') + def test_output_data_saving(self): + # Test if the function saves the sorted data correctly when an output path is provided + output_path = os.path.join(self.test_dir, 'sorted_data.csv') + result_path = f_661(self.test_csv_path, output_path=output_path, sort_key='title') + self.assertEqual(result_path, output_path) + # Check if the file is created and is not empty + self.assertTrue(os.path.exists(output_path)) + self.assertGreater(os.stat(output_path).st_size, 0) + def test_linear_regression_functionality(self): + # Test if linear regression model is fitted correctly + model = f_661(self.test_csv_path, linear_regression=True, x_column='x', y_column='y') + self.assertIsInstance(model, LinearRegression) + # Check if coefficients are as expected (approximate) + np.testing.assert_almost_equal(model.coef_, [2], decimal=1) + np.testing.assert_almost_equal(model.intercept_, 3, decimal=1) + def test_linear_regression_error_on_invalid_columns(self): + # Test error handling for non-existent columns in linear regression + with self.assertRaises(Exception) as context: + f_661(self.test_csv_path, linear_regression=True, x_column='nonexistent', y_column='title') + self.assertIn("Specified columns for linear regression do not exist in the dataframe", str(context.exception)) diff --git a/data/processed/f_663_simon_chien_edit_w_doc.py b/data/processed/f_663_simon_chien_edit_w_doc.py new file mode 100644 index 00000000..1550b6d2 --- /dev/null +++ b/data/processed/f_663_simon_chien_edit_w_doc.py @@ -0,0 +1,168 @@ +import pandas as pd +import pytz + + +def f_663(articles, timezone): + """ + Analyze the publication times of a list of articles: + 1) Convert 'published_time' to a specified timezone + 2) Group articles by 'category' + 3) For each 
category, calculate the count, mean, min, max publication times only considering the hour. + + Parameters: + articles (list): A list of dictionaries where each dictionary represents + an article with keys 'title', 'title_url', 'id', 'category', and 'published_time' (in UTC). + timezone (str): The string representation of the timezone to which the 'published_time' should be converted. + + Returns: + DataFrame: A pandas DataFrame with the count, mean, min, max publication hour for each category. + The category is the index of the DataFrame. + + Raises: + ValueError: If dictionary keys do not match the requirements. + TypeError: If articles is not a list of dictionaries. + ValueError: If an empty list is passed as articles. + + Requirements: + - pandas + - pytz + + Example: + >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)}, + ... {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)}, + ... {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}] + >>> analysis_df = f_663(articles, 'America/New_York') + >>> print(analysis_df) + count mean min max + category + Health 1 3.0 3 3 + Sports 1 19.0 19 19 + Technology 1 8.0 8 8 + + >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)}, + ... {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)}, + ... {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}] + >>> analysis_df = f_663(articles, 'America/New_York') + >>> print(analysis_df) + count mean min max + category + Health 1 3.0 3 3 + Sports 1 19.0 19 19 + Technology 1 8.0 8 8 + + >>> articles = [ + ... {'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': '09:01:04.403278+00:00'}, + ... {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': '02:03:04.403278+00:00'}, + ... {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': '21:11:01.403278+00:00'}, + ... {'title': 'newsies', 'title_url': 'newsies.news', 'id': 21, 'category': 'Technology', 'published_time': '4:25:12.403278+00:00'}, + ... {'title': 'ORF', 'title_url': 'orf.at', 'id': 44, 'category': 'Health', 'published_time': '03:04:03.403278+00:00'}, + ... 
{'title': 'ARD', 'title_url': 'ard.com', 'id': 61, 'category': 'Health', 'published_time': '11:41:12.403278+00:00'}] + >>> analysis_df = f_663(articles, 'America/New_York') + >>> print(analysis_df) + count mean min max + category + Health 3 15.666667 7 23 + Sports 1 22.000000 22 22 + Technology 2 2.500000 0 5 + """ + + if not isinstance(articles, list): + raise TypeError("articles should be a list of dictionaries.") + + if not all(isinstance(item, dict) for item in articles): + raise TypeError("articles should be a list of dictionaries.") + + if len(articles) == 0: + raise ValueError("input articles list should contain at least one article.") + + if any(not sorted(dic.keys()) == ['category', 'id', 'published_time', 'title', 'title_url'] for dic in articles): + raise ValueError( + "input dictionaries must contain the following keys: 'category', 'id', 'title', 'title_url', 'published_time'") + + tz = pytz.timezone(timezone) + for article in articles: + article['published_time'] = pd.to_datetime(article['published_time']).astimezone(tz) + + df = pd.DataFrame(articles) + df['published_time'] = df['published_time'].dt.hour + + analysis_df = df.groupby('category')['published_time'].agg(['count', 'mean', 'min', 'max']) + + return analysis_df + +import unittest +import pandas as pd +import pytz +from datetime import datetime +class TestCases(unittest.TestCase): + def setUp(self): + self.articles = [ + {'title': 'Apple News', 'title_url': 'apple.com/news', 'id': 1, 'category': 'Technology', + 'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.UTC)}, + {'title': 'Sports Update', 'title_url': 'sports.com/update', 'id': 2, 'category': 'Sports', + 'published_time': datetime(2023, 1, 1, 15, 0, tzinfo=pytz.UTC)}, + {'title': 'Health Today', 'title_url': 'health.com/today', 'id': 3, 'category': 'Health', + 'published_time': datetime(2023, 1, 1, 8, 0, tzinfo=pytz.UTC)} + ] + def test_empty_articles_list(self): + # Test handling of empty list + with self.assertRaises(ValueError): + f_663([], 'America/New_York') + def test_invalid_article_format(self): + # Test handling of improperly formatted articles list + with self.assertRaises(ValueError): + f_663([{'wrong_key': 'wrong_value'}], 'America/New_York') + def test_conversion_and_grouping(self): + timezone = 'America/New_York' + result_df = f_663(self.articles, timezone) + expected_data = { + 'count': {'Health': 1, 'Sports': 1, 'Technology': 1}, + 'mean': {'Health': 3.0, 'Sports': 10.0, 'Technology': 7.0}, + 'min': {'Health': 3, 'Sports': 10, 'Technology': 7}, + 'max': {'Health': 3, 'Sports': 10, 'Technology': 7} + } + expected_df = pd.DataFrame(expected_data) + # Ensure the data types match, especially for integer columns + expected_df = expected_df.astype({ + 'min': 'int32', + 'max': 'int32', + 'count': 'int64', + 'mean': 'float64' + }) + expected_df.index.name = 'category' + pd.testing.assert_frame_equal(result_df, expected_df) + def test_article_timezone_conversion(self): + # Assuming test data has UTC as the base timezone and checking against London timezone + result = f_663(self.articles, 'Europe/London') + expected_hours = [8.0, 15.0, 12.0] + actual_hours = result.reset_index()['mean'].tolist() + self.assertEqual(expected_hours, actual_hours) + def test_different_timezones_across_categories(self): + # Create a set of articles across different categories and timezones + articles = [ + {'title': 'Tech Trends', 'title_url': 'tech.com/trends', 'id': 1, 'category': 'Technology', + 'published_time': datetime(2023, 1, 1, 12, 0, 
tzinfo=pytz.timezone('UTC'))}, + {'title': 'World Sports', 'title_url': 'sports.com/world', 'id': 2, 'category': 'Sports', + 'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('Asia/Tokyo'))}, # +9 hours from UTC + {'title': 'Health News', 'title_url': 'health.com/news', 'id': 3, 'category': 'Health', + 'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('America/Los_Angeles'))} + # -8 hours from UTC + ] + timezone = 'America/New_York' # UTC-5 + result_df = f_663(articles, timezone) + expected_data = { + 'count': {'Health': 1, 'Sports': 1, 'Technology': 1}, + 'mean': {'Health': 14.0, 'Sports': 21.0, 'Technology': 7.0}, + # Converting 12:00 from respective timezones to New York time + 'min': {'Health': 14, 'Sports': 21, 'Technology': 7}, + 'max': {'Health': 14, 'Sports': 21, 'Technology': 7} + } + expected_df = pd.DataFrame(expected_data) + expected_df.index.name = 'category' + expected_df = expected_df.astype({ + 'min': 'int32', + 'max': 'int32', + 'count': 'int64', + 'mean': 'float64' + }) + pd.testing.assert_frame_equal(result_df, expected_df) diff --git a/data/processed/f_674_simon_chien_edit_wo_doc.py b/data/processed/f_674_simon_chien_edit_wo_doc.py new file mode 100644 index 00000000..071db061 --- /dev/null +++ b/data/processed/f_674_simon_chien_edit_wo_doc.py @@ -0,0 +1,110 @@ +import collections +import numpy as np + + +def f_674(file_name): + """ + Find the most common value in each column of a csv file with column names. + + If some values occur the same number of times, the values are sorted + alphabetically and the first is considered most common. + + If an empty csv is passed, an empty dictionary is returned. + + Parameters: + file_name (str): The name of the csv file. + + Returns: + dict: A dictionary with column names as keys and most common values as values. 
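+          If the csv has exactly one data row, the single value of each column
+          is returned directly as its most common value.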
+ + Requirements: + - collections + - numpy + + Example: + >>> common_values = f_674('sample.csv') + >>> print(common_values) + {'Name': 'Simon Velasquez', + 'Age': 21, + 'Fruit': 'Apple', + 'Genre': 'HipHop', + 'Height': 172} + + >>> common_values = f_674('test.csv') + >>> print(common_values) + {'Object': 'Chair', + 'Weight': '211kg', + 'Dancing Style': 'Waltz',} + """ + data = np.genfromtxt(file_name, delimiter=',', names=True, + dtype=None, encoding=None) + common_values = {} + + if len(np.atleast_1d(data)) == 0: + return {} + + if len(np.atleast_1d(data)) == 1: + for col in data.dtype.names: + common_values[col] = data[col].item() + + else: + for col in data.dtype.names: + counter = collections.Counter(data[col]) + if counter.most_common(2)[0][1] == counter.most_common(2)[1][1]: + common_values[col] = sorted(counter.items())[0][0] + else: + common_values[col] = counter.most_common(1)[0][0] + + return common_values + +import unittest +import os +import shutil +import tempfile +import csv +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory to house the CSV files + self.test_dir = tempfile.mkdtemp() + def tearDown(self): + # Remove the temporary directory after the test + shutil.rmtree(self.test_dir) + def create_csv(self, file_name, headers, data): + # Helper function to create a CSV file + path = os.path.join(self.test_dir, file_name) + with open(path, 'w', newline='') as csvfile: + writer = csv.DictWriter(csvfile, fieldnames=headers) + writer.writeheader() + for row in data: + writer.writerow(row) + return path + def test_empty_csv(self): + # Test for an empty CSV file + file_path = self.create_csv('empty.csv', ['Name', 'Age'], []) + result = f_674(file_path) + self.assertEqual(result, {}) + def test_single_entry(self): + # Test for a CSV file with a single entry + file_path = self.create_csv('single.csv', ['Name', 'Age'], [{'Name': 'John', 'Age': '30'}]) + result = f_674(file_path) + self.assertEqual(result, {'Name': 'John', 'Age': 30}) + def test_common_values_sorted(self): + # Test for common values, ensuring alphabetical sorting + file_path = self.create_csv('common_values.csv', ['Fruit'], [{'Fruit': 'Apple'}, {'Fruit': 'Banana'}, {'Fruit': 'Apple'}, {'Fruit': 'Banana'}, {'Fruit': 'Cherry'}]) + result = f_674(file_path) + self.assertEqual(result, {'Fruit': 'Apple'}) + def test_multiple_columns(self): + # Test for multiple columns and entries + data = [{'Name': 'Alice', 'Age': '25', 'Country': 'USA'}, + {'Name': 'Bob', 'Age': '30', 'Country': 'USA'}, + {'Name': 'Alice', 'Age': '25', 'Country': 'Canada'}] + file_path = self.create_csv('multi_columns.csv', ['Name', 'Age', 'Country'], data) + result = f_674(file_path) + expected = {'Name': 'Alice', 'Age': 25, 'Country': 'USA'} + self.assertEqual(result, expected) + def test_tie_breaking(self): + # Test for tie-breaking in value counts + data = [{'Name': 'Alice'}, {'Name': 'Bob'}, {'Name': 'Alice'}, {'Name': 'Bob'}] + file_path = self.create_csv('tie.csv', ['Name'], data) + result = f_674(file_path) + self.assertEqual(result, {'Name': 'Alice'}) diff --git a/data/processed/f_675_simon_chien_edit_wo_doc.py b/data/processed/f_675_simon_chien_edit_wo_doc.py new file mode 100644 index 00000000..99c68030 --- /dev/null +++ b/data/processed/f_675_simon_chien_edit_wo_doc.py @@ -0,0 +1,130 @@ +import pandas as pd +from sklearn.preprocessing import MinMaxScaler + + +def f_675(file_name: str) -> pd.DataFrame: + """Normalize data in a csv file using MinMaxScaler from sklearn. 
+ Only numeric columns are normalized. Columns with other dtypes are left as + they are. + + Parameters: + file_name (str): The name of the csv file. + + Returns: + DataFrame: A pandas DataFrame with normalized data. + + Raises: + ValueError: If input does not have numeric columns. + + Requirements: + - pandas + - sklearn.preprocessing.MinMaxScaler + + Example: + >>> normalized_data = f_675("sample.csv") + >>> print(normalized_data.head()) + + Name Age Salary + 0 Alex Anderson 0.304651 0.122298 + 1 Mr. Leslie Casey 0.28140 0.598905 + 2 Anthony George 0.996744 0.216552 + 3 Brian Washington 0.126279 0.459948 + 4 Elias Lawrence 0.337239 0.124185 + + >>> normalized_data = f_675("test.csv") + >>> print(normalized_data.head()) + + Fruit Weight Amount + 0 Aplple 1 0.5 + 1 Mr. Leslie Casey 0.32140 0.998905 + 2 Anthony George 0.8998888 0.123784 + 3 Brian Washington 0.121222 0.445321 + 4 Elias Lawrence 0.345223 0 + + """ + df = pd.read_csv(file_name) + if df.select_dtypes(include='number').empty: + raise ValueError("Input must at least have one numeric column.") + + scaler = MinMaxScaler() + numeric_columns = df.select_dtypes(include='number').columns + df[numeric_columns] = scaler.fit_transform(df[numeric_columns]) + + return df + +import unittest +import pandas as pd +import tempfile +import os +import shutil +class TestCases(unittest.TestCase): + def setUp(self): + # Set up a temporary directory + self.test_dir = tempfile.mkdtemp() + def tearDown(self): + # Clean up by removing the directory + shutil.rmtree(self.test_dir) + def create_csv(self, filename, data): + # Helper function to create a CSV file with the given data + full_path = os.path.join(self.test_dir, filename) + data.to_csv(full_path, index=False) + return full_path + def test_non_numeric_and_empty(self): + # Test with non-numeric and empty data + non_numeric_df = pd.DataFrame({ + "Name": ["Alice", "Bob"], + "City": ["New York", "Los Angeles"] + }) + empty_df = pd.DataFrame() + non_numeric_path = self.create_csv("non_numeric.csv", non_numeric_df) + empty_path = self.create_csv("empty.csv", empty_df) + self.assertRaises(ValueError, f_675, non_numeric_path) + self.assertRaises(ValueError, f_675, empty_path) + def test_single_row(self): + # Test with a single row of numeric data + single_row_df = pd.DataFrame({ + "Name": ["Olivia Anderson"], + "Age": [35], + "Salary": [58000] + }) + csv_path = self.create_csv("single_row.csv", single_row_df) + df = f_675(csv_path) + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] == 0).all() and (df['Salary'] == 0).all()) + def test_multiple_rows(self): + # Test multiple rows with numeric data + data_df = pd.DataFrame({ + "Name": ["Alice", "Bob", "Charlie"], + "Age": [25, 35, 45], + "Salary": [50000, 60000, 70000] + }) + csv_path = self.create_csv("multiple_rows.csv", data_df) + df = f_675(csv_path) + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) + def test_mixed_columns(self): + # Test with a mix of numeric and non-numeric columns + mixed_df = pd.DataFrame({ + "Name": ["Alice", "Bob", "Charlie"], + "Age": [25, 35, 45], + "Salary": [50000, 60000, 70000], + "City": ["New York", "Chicago", "San Francisco"] + }) + csv_path = self.create_csv("mixed_columns.csv", mixed_df) + df = f_675(csv_path) + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and 
(df['Salary'] <= 1).all()) + self.assertTrue('City' in df.columns and df['City'].equals(mixed_df['City'])) + def test_large_dataset(self): + # Test with a large dataset to ensure scalability + large_df = pd.DataFrame({ + "Age": range(10000), # Large range of ages + "Salary": range(10000, 20000) # Large range of salaries + }) + csv_path = self.create_csv("large_dataset.csv", large_df) + df = f_675(csv_path) + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) diff --git a/data/processed/f_694_simon_chien_edit_wo_doc.py b/data/processed/f_694_simon_chien_edit_wo_doc.py new file mode 100644 index 00000000..903b4d96 --- /dev/null +++ b/data/processed/f_694_simon_chien_edit_wo_doc.py @@ -0,0 +1,95 @@ +import pandas as pd +import numpy as np + + +def f_694(file_path, num_rows, data_dimensions=5, random_seed=None): + """ + Creates a CSV file on a given file path with random numeric data. + The number of rows in the CSV file is determined by the 'num_rows' parameter, + and the number of columns (features) is determined by the 'data_dimensions' parameter. + Columns are named following the convention: 'Feature_x', where x is the number of the + feature column starting at 1. + + Parameters: + file_path (str): The file path where the CSV file should be created. + num_rows (int): The number of rows of random data to generate. + data_dimensions (int, optional): The number of columns (features) in the CSV file. Defaults to 5. + random_seed (int, optional): Seed used in rng. Defaults to None. + + Returns: + str: The file path of the generated CSV file. + + Requirements: + - pandas + - numpy + + Example: + >>> f_694('/tmp/data.csv', 100) + '/tmp/data.csv' + + >>> f_694('test.csv', 5, 2, random_seed=42) + 'test.csv' + >>> pd.read_csv('test.csv') + Feature_1 Feature_2 + 0 0.154163 0.740050 + 1 0.918747 0.900715 + 2 0.283828 0.606083 + 3 0.521226 0.552038 + 4 0.764560 0.020810 + + """ + np.random.seed(random_seed) + df = pd.DataFrame(np.random.rand(num_rows, data_dimensions), + columns=[f'Feature_{i + 1}' for i in range(data_dimensions)]) + + df.to_csv(file_path, index=False) + + return file_path + +import unittest +import os +import pandas as pd +import shutil +import tempfile +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory for each test case + self.test_dir = tempfile.mkdtemp() + def tearDown(self): + # Remove the temporary directory after each test + shutil.rmtree(self.test_dir) + def test_basic_functionality(self): + # Test with default parameters + file_path = f_694(os.path.join(self.test_dir, 'data.csv'), 100) + self.assertTrue(os.path.exists(file_path)) + df = pd.read_csv(file_path) + self.assertEqual(len(df), 100) + self.assertEqual(len(df.columns), 5) + def test_custom_dimensions(self): + # Test with custom dimensions + file_path = f_694(os.path.join(self.test_dir, 'data_custom.csv'), 50, 7) + self.assertTrue(os.path.exists(file_path)) + df = pd.read_csv(file_path) + self.assertEqual(len(df), 50) + self.assertEqual(len(df.columns), 7) + def test_empty_file(self): + # Test generating an empty file + file_path = f_694(os.path.join(self.test_dir, 'empty.csv'), 0, 5) + self.assertTrue(os.path.exists(file_path)) + df = pd.read_csv(file_path) + self.assertEqual(len(df), 0) + def test_random_seed(self): + # Test reproducibility with a random seed + file_path1 = f_694(os.path.join(self.test_dir, 'data_seed.csv'), 20, 5, 42) + file_path2 
= f_694(os.path.join(self.test_dir, 'data_seed.csv'), 20, 5, 42) + df1 = pd.read_csv(file_path1) + df2 = pd.read_csv(file_path2) + pd.testing.assert_frame_equal(df1, df2) + def test_no_columns(self): + # Test with zero columns + file_path = f_694(os.path.join(self.test_dir, 'no_columns.csv'), 10, 0) + self.assertTrue(os.path.exists(file_path)) + with open(file_path, 'r') as file: + data = file.read() + # Expect the file to contain only the headers or be empty + self.assertTrue(data == '' or all([x.strip() == '' for x in data.split(',')])) diff --git a/data/processed/f_723_simon_chien_edit_wo_doc.py b/data/processed/f_723_simon_chien_edit_wo_doc.py new file mode 100644 index 00000000..0e5d15e5 --- /dev/null +++ b/data/processed/f_723_simon_chien_edit_wo_doc.py @@ -0,0 +1,114 @@ +import pandas as pd + +import pandas as pd +import random + + +def f_723(csv_file, column_name='data', pattern='\d+[xX]', sample_size=None, seed=42): + """ + Search for matches with a specified regex pattern in a given column of a CSV file and optionally return a random sample of these matches. + + The random sampling is implemented by generating a random list of integers which are used as indices. + The number of generated indices is given by sample_size. + + + Parameters: + csv_file (str): Path to the CSV file. + column_name (str, optional): The name of the column to search. Defaults to 'data'. + pattern (str, optional): The regex pattern to search for. Defaults to '\d+[xX]'. + sample_size (int, optional): Number of random samples to return from the matches. If None, all matches are returned. Defaults to None. + seed (int, optional): Seed for the random number generator for reproducibility. Defaults to 42. + + Returns: + DataFrame: A pandas DataFrame containing either all the rows with matches or a random sample of them. + + Requirements: + - pandas + - random: for generating the random list of indices + + Example: + >>> result = f_723('sample.csv', column_name='data', pattern='\d+[xX]', sample_size=10, seed=42) + >>> print(result) + index data + 210 211 Fund several agency oil. Evening plant thank t... + 45 46 Language interest four take old. Education if ... + 525 526 Action million cultural stand. Heart explain a... + 465 466 Security face clearly every could. Image beaut... + 430 431 Popular produce floor part soldier human. Youn... + 260 261 Customer game focus respond that central. Nigh... + 195 196 The writer parent. Life social house west ten ... + 165 166 Main hotel production nothing.\r\nCoach voice ... + 810 811 Early right nature technology. Conference mind... + 60 61 Interest require gas wall. Different it see fi... + + >>> result = f_723('sample.csv', column_name='data', sample_size=2) + >>> print(result) + index data + 125 126 Fund elephenat, the dinoasuar eat this language t... + 21 22 Such an important story banking at the house a da... 
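+
+    Matching uses pandas.Series.str.contains, so a row is kept whenever the
+    pattern occurs anywhere in the column value; NaN entries never match.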
+ + + + """ + df = pd.read_csv(csv_file) + matches = df[df[column_name].str.contains(pattern, na=False)] + + if sample_size is not None: + random.seed(seed) # Set the seed for reproducibility + sample_size = min(sample_size, len(matches)) # Ensure sample size is not greater than the number of matches + sampled_indices = random.sample(range(len(matches)), sample_size) # Randomly select indices + matches = matches.iloc[sampled_indices] # Select rows corresponding to sampled indices + + return matches + +import unittest +import pandas as pd +import tempfile +import shutil +import os +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory to store the test CSV files + self.test_dir = tempfile.mkdtemp() + self.test_file = os.path.join(self.test_dir, "test_data.csv") + # Create a sample DataFrame + data = { + "data": ["123x good", "no match here", "456X bad", "789x good", "ABC"], + "other_column": ["data1", "data2", "data3", "data4", "data5"] + } + self.df = pd.DataFrame(data) + self.df.to_csv(self.test_file, index=False) + def tearDown(self): + # Remove temporary directory after the test + shutil.rmtree(self.test_dir) + def test_default_parameters(self): + result = f_723(self.test_file) + expected_data = { + "data": ["123x good", "456X bad", "789x good"], + "other_column": ["data1", "data3", "data4"] + } + expected_df = pd.DataFrame(expected_data) + pd.testing.assert_frame_equal(result.reset_index(drop=True), expected_df) + def test_custom_column(self): + with self.assertRaises(KeyError): + f_723(self.test_file, column_name="nonexistent_column") + def test_custom_pattern(self): + result = f_723(self.test_file, pattern='\d+X') + expected_data = { + "data": ["456X bad"], + "other_column": ["data3"] + } + expected_df = pd.DataFrame(expected_data) + pd.testing.assert_frame_equal(result.reset_index(drop=True), expected_df) + def test_sample_size(self): + result = f_723(self.test_file, sample_size=2, seed=42) + self.assertEqual(len(result), 2) + def test_no_matches(self): + result = f_723(self.test_file, pattern="nope") + self.assertTrue(result.empty) + def test_sample_size_larger_than_matches(self): + result = f_723(self.test_file, sample_size=10) + self.assertEqual(len(result), 3) # Only three matches exist + def test_zero_sample_size(self): + result = f_723(self.test_file, sample_size=0) + self.assertTrue(result.empty) diff --git a/data/processed/f_724_simon_chien_edit_wo_doc.py b/data/processed/f_724_simon_chien_edit_wo_doc.py new file mode 100644 index 00000000..2d89b5e1 --- /dev/null +++ b/data/processed/f_724_simon_chien_edit_wo_doc.py @@ -0,0 +1,124 @@ +import sqlite3 +import pandas as pd +import os + + +def f_724(db_file, table_name, column_name, pattern='\d+[xX]'): + """ + Find all matches with a regex pattern in a list of strings in an SQL database. + + The function loads an sql database and selects all entries from the specified + table. All entries of the specified column are matched against a regex pattern. + Matches are returned in a DataFrame. + + Parameters: + db_file (str): The SQLite database file. + table_name (str): The name of the table to search. + column_name (str): The name of the column to search. + pattern (str, optional): The regex pattern to search for. Defaults to '\d+[xX]'. + + Returns: + DataFrame: A pandas DataFrame with the matches. + + Raises: + ValueError: If db_file does not exist. 
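+
+    Note: if the searched column is not of string (object) dtype, an empty
+    DataFrame is returned instead of raising an error.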
+ + Requirements: + - sqlite3 + - pandas + - os + + Example: + >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column') + >>> print(result.head(10)) + id test_column + 0 1 4x4 car + 1 2 New 3x3 puzzle + 3 4 Product with 5X feature + 55 56 1xsafe + 56 57 3xmother + 57 58 5xenjoy + 58 59 2xhome + 59 60 3xanswer + 60 61 5xgirl + 61 62 5xkind + + >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column', pattern='kind') + >>> print(result) + id test_column + 20 21 To between successful ever ago PM toward today... + 42 43 Entire manage wife management perform size def... + 61 62 5xkind + """ + + if not os.path.isfile(db_file): + raise ValueError('db_file does not exist.') + + conn = sqlite3.connect(db_file) + df = pd.read_sql_query(f"SELECT * FROM {table_name}", conn) + + if df[column_name].dtype == 'object': # Check if the column data type is a string + matches = df[df[column_name].str.contains(pattern)] + else: + matches = pd.DataFrame(columns=df.columns) # Return an empty DataFrame + + return matches + +import unittest +import sqlite3 +import pandas as pd +import os +import tempfile +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory to hold the database + self.test_dir = tempfile.mkdtemp() + self.db_path = os.path.join(self.test_dir, "test.db") + # Set up a new database and populate it with initial data + self.conn = sqlite3.connect(self.db_path) + self.conn.execute("CREATE TABLE test_table (id INTEGER PRIMARY KEY, test_column TEXT)") + data = [ + (1, "4x4 car"), + (2, "New 3x3 puzzle"), + (3, "Product with 5X feature"), + (4, "1xsafe"), + (5, "3xmother") + ] + self.conn.executemany("INSERT INTO test_table (id, test_column) VALUES (?, ?)", data) + self.conn.commit() + def tearDown(self): + # Close the connection and remove the temporary directory + self.conn.close() + os.remove(self.db_path) + os.rmdir(self.test_dir) + def test_regular_expression_match(self): + # Test case with known data and expected matches + result = f_724(self.db_path, 'test_table', 'test_column') + expected = pd.DataFrame({ + 'id': [1, 2, 3, 4, 5], + 'test_column': ['4x4 car', 'New 3x3 puzzle', 'Product with 5X feature', '1xsafe', '3xmother'] + }, index=[0, 1, 2, 3, 4]) + pd.testing.assert_frame_equal(result, expected) + def test_no_matches(self): + # Test case where no entries match the pattern + result = f_724(self.db_path, 'test_table', 'test_column', pattern='abc') + self.assertTrue(result.empty) + def test_non_existent_table(self): + # Catch the OperationalError from sqlite directly + with self.assertRaises(pd.errors.DatabaseError): + f_724(self.db_path, 'fake_table', 'test_column') + def test_non_existent_column(self): + # Catch the correct exception for non-existent column + with self.assertRaises(KeyError): + f_724(self.db_path, 'test_table', 'fake_column') + def test_different_pattern(self): + # Test case with a different pattern + self.conn.execute("INSERT INTO test_table (id, test_column) VALUES (?, ?)", (6, "something 1ab2x")) + self.conn.commit() + result = f_724(self.db_path, 'test_table', 'test_column', pattern='1ab2x') + result.reset_index(drop=True, inplace=True) # Resetting index before comparison + expected = pd.DataFrame({ + 'id': [6], + 'test_column': ['something 1ab2x'] + }, index=[0]) + pd.testing.assert_frame_equal(result, expected) diff --git a/data/processed/f_725_simon_chien_edit_w_doc.py b/data/processed/f_725_simon_chien_edit_w_doc.py new file mode 100644 index 00000000..257f3d86 --- /dev/null +++ 
b/data/processed/f_725_simon_chien_edit_w_doc.py
@@ -0,0 +1,100 @@
+import pandas as pd
+from statsmodels.tsa.stattools import adfuller
+
+
+def f_725(df: pd.DataFrame, column_a: str, column_b: str, column_c: str) -> bool:
+    """
+    Determines if a specific subset of data is stationary.
+
+    Functionality:
+    1. Filters rows where column_b > 50 and column_c == 900.
+    2. Checks if the resulting subset of data in column_a is stationary using the Augmented Dickey-Fuller test.
+    3. Returns True if the data is stationary, False otherwise.
+
+    Data is considered to be stationary if the p_value returned by the
+    Augmented Dickey-Fuller test is smaller than 0.05.
+
+    If column_a is empty after filtering or if its values are constant, True
+    is returned.
+
+    Parameters:
+    df (pd.DataFrame): A DataFrame containing the data.
+    column_a (str): The name of the column to test for stationarity.
+    column_b (str): The name of the column used for filtering based on its value being greater than 50.
+    column_c (str): The name of the column used for filtering based on its value being equal to 900.
+
+    Returns:
+    bool: True if the data in column_a (after filtering based on column_b and column_c) is stationary, False otherwise.
+
+    Requirements:
+    pandas
+    statsmodels: for using the adfuller test
+
+    Example:
+    >>> df = pd.DataFrame({
+    ...      'A': [1, 2, 3, 4, 5, 6],
+    ...      'B': [60, 70, 80, 90, 100, 110],
+    ...      'C': [900, 900, 900, 900, 900, 900]
+    ... })
+    >>> f_725(df, 'A', 'B', 'C')
+    False
+
+    >>> df = pd.DataFrame({
+    ...     'TempA': [],
+    ...     'TempB': [],
+    ...     'TempC': []
+    ... })
+    >>> f_725(df, 'TempA', 'TempB', 'TempC')
+    True
+    """
+    # Filter rows based on column_b and column_c
+    filtered_df = df[(df[column_b] > 50) & (df[column_c] == 900)]
+
+    # An empty or constant column is trivially stationary; nunique() of an
+    # empty column is 0, so this also covers the empty-after-filter case
+    if filtered_df[column_a].nunique() <= 1:
+        return True
+
+    # Perform Augmented Dickey-Fuller test
+    adf_result = adfuller(filtered_df[column_a])
+    p_value = adf_result[1]
+    return p_value < 0.05
+
+import unittest
+import os
+import pandas as pd
+class TestCases(unittest.TestCase):
+    def setUp(self):
+        # Create DataFrame in setUp for test isolation
+        self.data = pd.DataFrame({
+            'A': list(range(100)),
+            'B': [x * 2 for x in range(100)],
+            'C': [900 if x % 2 == 0 else 800 for x in range(100)]
+        })
+    def test_constant_value(self):
+        # All values in column A are constant after filtering
+        self.data['A'] = 5
+        result = f_725(self.data, 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True as data is constant.")
+    def test_empty_after_filter(self):
+        # After filtering, no rows remain
+        result = f_725(self.data[self.data['B'] > 1000], 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True as no data remains after filter.")
+    def test_non_stationary_data(self):
+        # Test a clearly non-stationary dataset
+        result = f_725(self.data, 'A', 'B', 'C')
+        self.assertFalse(result, "Should be False as data is non-stationary.")
+    def test_stationary_data(self):
+        # Test a stationary dataset
+        self.data['A'] = 5
+        result = f_725(self.data, 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True as data is stationary.")
+    def test_edge_case_small_dataset(self):
+        # Test a very small dataset
+        small_data = pd.DataFrame({
+            'A': [1, 1],
+            'B': [60, 70],
+            'C': [900, 900]
+        })
+        result = f_725(small_data, 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True due to small dataset size or no variation.")
diff --git a/data/processed/f_728_simon_chien_edit_w_doc.py
b/data/processed/f_728_simon_chien_edit_w_doc.py new file mode 100644 index 00000000..b63b9bef --- /dev/null +++ b/data/processed/f_728_simon_chien_edit_w_doc.py @@ -0,0 +1,156 @@ +import pandas as pd +from collections import Counter + + +def f_728(data): + """ + Analyze a dictionary of student data to return a dataframe sorted by name and age in ascending order, + the average score per student as a pandas Series, and the most common age as an integer. + + Parameters: + data (dict): A dictionary containing student data with three keys: + - 'Name': List of student names. + - 'Age': List of student ages. + - 'Score': List of student scores. + + Returns: + pd.DataFrame, pd.Series, int or None: + - A dataframe sorted by 'Name' and 'Age' in ascending order. + - A series representing average scores indexed by student names. + - An integer representing the most common age or None if no data is available. + + Raises: + ValueError: If the dictionary does not have the required keys. + + Requirements: + - pandas + - collections + + Example: + >>> data = { + ... 'Name': ['Tom', 'Nick', 'John', 'Tom', 'John', 'John', 'Nick', 'Tom', 'John', 'Tom'], + ... 'Age': [20, 21, 19, 20, 19, 19, 21, 20, 19, 20], + ... 'Score': [85, 79, 92, 88, 90, 92, 81, 86, 90, 85] + ... } + >>> df, avg_scores, common_age = f_728(data) + >>> print(df) + Name Age Score + 2 John 19 92 + 4 John 19 90 + 5 John 19 92 + 8 John 19 90 + 1 Nick 21 79 + 6 Nick 21 81 + 0 Tom 20 85 + 3 Tom 20 88 + 7 Tom 20 86 + 9 Tom 20 85 + + >>> print(avg_scores) + Name + John 91.0 + Nick 80.0 + Tom 86.0 + Name: Score, dtype: float64 + >>> print(common_age) + 19 + + >>> data = { + ... 'Name': ['Simon', 'Alex', 'Tanja', 'Amanda', 'Tanja'], + ... 'Age': [21, 42, 54, 20, 54], + ... 'Score': [1, 1, 2, 3, 5] + ... } + >>> df, avg_scores, common_age = f_728(data) + >>> print(df) + Name Age Score + 1 Alex 42 1 + 3 Amanda 20 3 + 0 Simon 21 1 + 2 Tanja 54 2 + 4 Tanja 54 5 + >>> print(avg_scores) + Name + Alex 1.0 + Amanda 3.0 + Simon 1.0 + Tanja 3.5 + Name: Score, dtype: float64 + >>> print(common_age) + 54 + """ + + if not all(key in data for key in ['Name', 'Age', 'Score']): + raise ValueError("The dictionary must have the keys 'Name', 'Age', 'Score'") + + # Creating a dataframe and sorting it + df = pd.DataFrame(data).sort_values(['Name', 'Age']) + + # Calculating average scores + avg_scores = df.groupby('Name')['Score'].mean() + + # Getting the most common age + age_counts = Counter(df['Age']) + most_common_age = age_counts.most_common(1)[0][0] if age_counts else None + + return df, avg_scores, most_common_age + +import unittest +import pandas as pd +import os +class TestCases(unittest.TestCase): + def test_wrong_keys(self): + # Testing with incorrect dictionary keys + data = { + 'Names': ['Tom', 'Nick'], + 'Ages': [20, 21], + 'Scores': [85, 79] + } + with self.assertRaises(ValueError): + f_728(data) + def test_correct_processing(self): + # Testing with correctly formatted data + data = { + 'Name': ['Tom', 'Nick', 'Tom', 'John'], + 'Age': [20, 21, 20, 19], + 'Score': [85, 79, 88, 92] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(df.iloc[0]['Name'], 'John') + self.assertAlmostEqual(avg_scores['Tom'], 86.5) + self.assertEqual(common_age, 20) + def test_empty_data(self): + # Testing with empty lists + data = {'Name': [], 'Age': [], 'Score': []} + df, avg_scores, common_age = f_728(data) + self.assertTrue(df.empty) + self.assertTrue(avg_scores.empty) + self.assertIsNone(common_age) + def test_all_same_age(self): + # Testing with all students 
diff --git a/data/processed/f_730_simon_chien_edit_wo_doc.py b/data/processed/f_730_simon_chien_edit_wo_doc.py
new file mode 100644
index 00000000..ddf0b410
--- /dev/null
+++ b/data/processed/f_730_simon_chien_edit_wo_doc.py
@@ -0,0 +1,107 @@
+import pandas as pd
+import os
+
+
+def f_730(data_dir: str, csv_files: list) -> pd.DataFrame:
+    """
+    Merge / Concatenate multiple CSV files from a specified directory into a single Pandas DataFrame.
+
+    If an empty list of files is passed, an empty DataFrame is returned.
+
+    Parameters:
+    data_dir (str): The directory path where the CSV files are located.
+    csv_files (list): A list of CSV file names to be merged.
+
+    Returns:
+    pd.DataFrame: A pandas DataFrame with the merged data.
+
+    Requirements:
+    - pandas
+    - os
+
+    Example:
+    >>> df = f_730('/path/to/data/directory', ['file1.csv', 'file2.csv', 'file3.csv'])
+    >>> print(df.head())
+         Name  Age  Gender
+    0   Simon    5    Male
+    1   Bobby   32    Male
+    2   Elena   13  Female
+    3     Tom   23    Male
+    4  Franko   12    Male
+
+    >>> df = f_730('/path/to/data/directory', ['file1.csv', 'other_file.csv'])
+    >>> print(df.head())
+        Name   Age  Gender Animal  Size
+    0  Simon   5.0    Male    NaN   NaN
+    1  Bobby  32.0    Male    NaN   NaN
+    2  Elena  13.0  Female    NaN   NaN
+    3    NaN   NaN     NaN  Tiger  12.0
+    """
+    merged_df = pd.DataFrame()
+
+    for file in csv_files:
+        file_path = os.path.join(data_dir, file)
+        df = pd.read_csv(file_path)
+        merged_df = pd.concat([merged_df, df], ignore_index=True)
+
+    return merged_df
+
+import unittest
+import pandas as pd
+import os
+import shutil
+import tempfile
+class TestCases(unittest.TestCase):
+    def setUp(self):
+        # Create a temporary directory to hold CSV files
+        self.test_dir = tempfile.mkdtemp()
+        self.files = {
+            'file1.csv': pd.DataFrame({
+                'Name': ['Alice', 'Bob'],
+                'Age': [25, 30]
+            }),
+            'file2.csv': pd.DataFrame({
+                'Name': ['Charlie'],
+                'Age': [35]
+            }),
+            'file3.csv': pd.DataFrame({
+                'Name': ['David', 'Eve'],
+                'Age': [45, 55],
+                'Gender': ['Male', 'Female']
+            }),
+            'file4.csv': pd.DataFrame({
+                'Name': ['Faythe'],
+                'Animal': ['Cat']
+            })
+        }
+        # Write files to disk
+        for filename, df in self.files.items():
+            df.to_csv(os.path.join(self.test_dir, filename), index=False)
+    def tearDown(self):
+        # Clean up the temporary directory
+        shutil.rmtree(self.test_dir)
+    def test_with_multiple_files(self):
+        # Test merging multiple files
+        result = f_730(self.test_dir, ['file1.csv', 'file2.csv'])
+        expected_df = pd.concat([self.files['file1.csv'], self.files['file2.csv']],
+                                ignore_index=True)
+        pd.testing.assert_frame_equal(result, expected_df)
+    def test_with_different_columns(self):
+        # Test files with different columns
+        result = f_730(self.test_dir, ['file1.csv', 'file3.csv', 'file4.csv'])
+        expected_df = pd.concat([self.files['file1.csv'], self.files['file3.csv'], self.files['file4.csv']],
+                                ignore_index=True)
+        pd.testing.assert_frame_equal(result, expected_df)
+    def test_with_empty_list(self):
+        # Test with an empty list of files
+        result = f_730(self.test_dir, [])
+        self.assertTrue(result.empty)
+    def test_with_nonexistent_file(self):
+        # Test referencing a non-existent file
+        with self.assertRaises(FileNotFoundError):
+            f_730(self.test_dir, ['nonexistent.csv'])
+    def test_single_file(self):
+        # Test with a single file
+        result = f_730(self.test_dir, ['file2.csv'])
+        expected_df = self.files['file2.csv']
+        pd.testing.assert_frame_equal(result, expected_df)
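The NaN cells in the second docstring example come from pandas aligning mismatched headers during concat. A small standalone sketch (illustrative only, not part of the patch):

# Illustrative sketch, not part of the patch.
import pandas as pd

a = pd.DataFrame({'Name': ['Simon'], 'Age': [5]})
b = pd.DataFrame({'Animal': ['Tiger'], 'Size': [12]})
# Columns are unioned and gaps become NaN, exactly as in f_730.
print(pd.concat([a, b], ignore_index=True))
#     Name  Age Animal  Size
# 0  Simon  5.0    NaN   NaN
# 1    NaN  NaN  Tiger  12.0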
diff --git a/data/processed/f_731_simon_chien_edit_wo_doc.py b/data/processed/f_731_simon_chien_edit_wo_doc.py
new file mode 100644
index 00000000..79d35027
--- /dev/null
+++ b/data/processed/f_731_simon_chien_edit_wo_doc.py
@@ -0,0 +1,127 @@
+import os
+import pandas as pd
+import numpy as np
+
+
+def f_731(data_dir: str, csv_file: str) -> pd.DataFrame:
+    """
+    Load a CSV file into a pandas DataFrame and replace the NaN values in
+    numeric columns with the mean of the corresponding column.
+    The resulting DataFrame is returned.
+
+    If an empty csv is passed, an empty DataFrame is returned.
+
+    Parameters:
+    - data_dir (str): The path to the directory containing the CSV file.
+    - csv_file (str): The name of the CSV file to be processed.
+
+    Returns:
+    pd.DataFrame: A pandas DataFrame with the processed data.
+
+    Raises:
+    FileNotFoundError: If csv_file does not exist.
+
+    Requirements:
+    - os
+    - pandas
+    - numpy
+
+    Example:
+    >>> df = f_731("/path/to/data/directory", "file.csv")
+    >>> print(df)
+         Fruit  Taste   Cost
+    0    Apple   Good      1
+    1   Orange    NaN      2
+    2  Avocado    Bad  1.667
+    3  Coconut  Tasty      2
+
+    >>> df = f_731("/path/to/data/directory", "test.csv")
+    >>> print(df)
+         Name   Score
+    0    Alex    25.2
+    1   Tanja    31.5
+    2   Maine      99
+    3    Lisa     100
+    4  Simone  63.925
+    """
+    file_path = os.path.join(data_dir, csv_file)
+    try:
+        df = pd.read_csv(file_path)
+    except pd.errors.EmptyDataError:
+        return pd.DataFrame()
+
+    for column in df.columns:
+        if np.issubdtype(df[column].dtype, np.number):  # checking for numeric columns
+            df[column] = df[column].fillna(df[column].mean())
+
+    return df
+
+import unittest
+import pandas as pd
+import numpy as np
+import os
+import tempfile
+import shutil
+class TestCases(unittest.TestCase):
+    def setUp(self):
+        # Create a temporary directory for test data
+        self.test_dir = tempfile.mkdtemp()
+    def tearDown(self):
+        # Remove the temporary directory after the test
+        shutil.rmtree(self.test_dir)
+    def create_csv(self, filename, data):
+        # Helper method to create a CSV file
+        filepath = os.path.join(self.test_dir, filename)
+        data.to_csv(filepath, index=False)
+        return filename
+    def test_empty_csv(self):
+        # Test with an empty CSV file
+        filename = self.create_csv('empty.csv', pd.DataFrame())
+        result = f_731(self.test_dir, filename)
+        self.assertTrue(result.empty)
+    def test_numeric_columns_nan_replacement(self):
+        data = pd.DataFrame({
+            'Age': [25, np.nan, 30],
+            'Salary': [50000, 60000, np.nan]
+        })
+        filename = self.create_csv('data.csv', data)
+        expected = pd.DataFrame({
+            'Age': [25.0, 27.5, 30.0],  # Ensure all ages are floats
+            'Salary': [50000.0, 60000.0, 55000.0]  # Ensure all salaries are floats
+        })
+        result = f_731(self.test_dir, filename)
+        pd.testing.assert_frame_equal(result, expected)
+    def test_mixed_columns(self):
+        data = pd.DataFrame({
+            'Name': ['Alice', 'Bob', 'Charlie'],
+            'Score': [np.nan, 88, 92]
+        })
+        filename = self.create_csv('mixed.csv', data)
+        expected = pd.DataFrame({
+            'Name': ['Alice', 'Bob', 'Charlie'],
+            'Score': [90.0, 88.0, 92.0]  # Ensure all scores are floats
+        })
+        result = f_731(self.test_dir, filename)
+        pd.testing.assert_frame_equal(result, expected)
+    def test_all_nan_column(self):
+        # Test with a column that is entirely NaN
+        data = pd.DataFrame({
+            'Empty': [np.nan, np.nan, np.nan]
+        })
+        filename = self.create_csv('all_nan.csv', data)
+        result = f_731(self.test_dir, filename)
+        self.assertTrue(result['Empty'].isnull().all())
+    def test_no_numeric_data(self):
+        # Test a CSV file with no numeric data
+        data = pd.DataFrame({
+            'City': ['New York', 'Los Angeles', 'Chicago']
+        })
+        filename = self.create_csv('cities.csv', data)
+        result = f_731(self.test_dir, filename)
+        pd.testing.assert_frame_equal(result, data)
+    def test_file_not_found(self):
+        # Test the FileNotFoundError
+        with self.assertRaises(FileNotFoundError):
+            f_731(self.test_dir, "non_existent.csv")
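The per-column imputation rule f_731 applies, demonstrated directly on an in-memory frame (illustrative only, not part of the patch; the CSV round-trip is skipped):

# Illustrative sketch, not part of the patch.
import numpy as np
import pandas as pd

df = pd.DataFrame({'Name': ['Alice', 'Bob'], 'Score': [np.nan, 88.0]})
for column in df.columns:
    # Only numeric columns are touched; the text column passes through as-is.
    if np.issubdtype(df[column].dtype, np.number):
        df[column] = df[column].fillna(df[column].mean())
print(df['Score'].tolist())  # [88.0, 88.0], the mean of the non-NaN values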
diff --git a/data/processed/f_732_simon_chien_edit_wo_doc.py b/data/processed/f_732_simon_chien_edit_wo_doc.py
new file mode 100644
index 00000000..12f72773
--- /dev/null
+++ b/data/processed/f_732_simon_chien_edit_wo_doc.py
@@ -0,0 +1,123 @@
+import os
+import random
+import pandas as pd
+
+
+def f_732(data_dir,
+          csv_files=['file1.csv', 'file2.csv', 'file3.csv'],
+          seed=None):
+    """
+    Randomly select one of the provided csv_files and select a certain number
+    of records from the file at random.
+    The selected records are returned in a DataFrame.
+    The name of the selected csv_file is also returned.
+
+    If the csv_file is empty, return an empty DataFrame.
+
+    Parameters:
+    data_dir (str): The directory where the CSV files are located.
+    csv_files (list of str): The list of CSV files to choose from. Default is ['file1.csv', 'file2.csv', 'file3.csv'].
+    seed (int, optional): Seed for random number generation and for sampling from the csv.
+
+    Returns:
+    tuple: A tuple containing two elements:
+        - str: The name of the randomly selected file.
+        - DataFrame: A pandas DataFrame with the selected rows.
+
+    Requirements:
+    - os
+    - random
+    - pandas
+
+    Example:
+    >>> file_name, df = f_732('test_data')
+    >>> print(file_name)
+    file2.csv
+    >>> print(df)
+          Animal  Weight
+    0        Cat       1
+    21     Mouse      12
+    15  Elephant    1000
+    2      Tiger     500
+
+    >>> file_name, df = f_732('data', csv_files=['test1.csv', 'test2.csv'], seed=42)
+    >>> print(file_name)
+    test1.csv
+    >>> print(df)
+           Name    House  Salary
+    12    Simba  mansion   11111
+    231 Dolores  mansion    2222
+    135  Elaine     shed   93274
+    21   Sophia   garden     111
+    """
+
+    random.seed(seed)
+
+    file = csv_files[random.randint(0, len(csv_files) - 1)]
+    file_path = os.path.join(data_dir, file)
+
+    try:
+        df = pd.read_csv(file_path)
+    except pd.errors.EmptyDataError:
+        return file, pd.DataFrame()
+
+    # A header-only CSV also yields an empty frame; guard it so that
+    # random.randint(1, 0) below cannot raise ValueError
+    if df.empty:
+        return file, df
+
+    selected_rows = df.sample(n=random.randint(1, len(df)), random_state=seed)
+
+    return file, selected_rows
+
+import unittest
+import pandas as pd
+import os
+import tempfile
+import shutil
+class TestCases(unittest.TestCase):
+    def setUp(self):
+        # Create a temporary directory
+        self.test_dir = tempfile.mkdtemp()
+        self.test_files = [
+            'file1.csv', 'file2.csv', 'file3.csv', 'file4.csv', 'file5.csv', 'empty.csv'
+        ]
+        # Sample data for CSV files
+        data = {
+            'file1.csv': pd.DataFrame({'Name': ['Alice', 'Bob'], 'Age': [25, 30]}),
+            'file2.csv': pd.DataFrame({'Name': ['Chris', 'Dana'], 'Age': [35, 40]}),
+            'file3.csv': pd.DataFrame({'Name': ['Eve', 'Frank'], 'Age': [45, 50]}),
+            'file4.csv': pd.DataFrame({'Name': ['Grace', 'Hank'], 'Age': [55, 60]}),
+            'file5.csv': pd.DataFrame({'Name': ['Ivan', 'Julia'], 'Age': [65, 70]}),
+            'empty.csv': pd.DataFrame()
+        }
+        # Create CSV files in the directory
+        for file_name, df in data.items():
+            df.to_csv(os.path.join(self.test_dir, file_name), index=False)
+    def tearDown(self):
+        # Remove the directory after the test
+        shutil.rmtree(self.test_dir)
+    def test_random_selection(self):
+        # Testing random selection and ensuring the file chosen and its data are correct
+        file_name, df = f_732(self.test_dir, seed=42)
+        self.assertTrue(file_name in self.test_files)
+        self.assertFalse(df.empty)
+    def test_specific_file_selection(self):
+        # Test selecting a specific file and checking contents
+        file_name, df = f_732(self.test_dir, ['file1.csv'], seed=42)
+        expected = pd.read_csv(os.path.join(self.test_dir, 'file1.csv'))
+        # Sample from expected and reset index
+        expected_sampled = expected.sample(len(df), random_state=42).reset_index(drop=True)
+        # Reset index of df to ensure indices match
+        df_reset = df.reset_index(drop=True)
+        # Assert frame equality
+        pd.testing.assert_frame_equal(df_reset, expected_sampled)
+    def test_empty_file(self):
+        # Ensure an empty file returns an empty DataFrame
+        file_name, df = f_732(self.test_dir, ['empty.csv'], seed=42)
+        self.assertEqual(file_name, 'empty.csv')
+        self.assertTrue(df.empty)
+    def test_multiple_files(self):
+        # Testing selection from multiple files
+        file_name, df = f_732(self.test_dir, ['file3.csv', 'file4.csv'], seed=24)
+        self.assertIn(file_name, ['file3.csv', 'file4.csv'])
+        self.assertFalse(df.empty)
+    def test_no_file_matches(self):
+        # Testing behavior when no files match the list
+        with self.assertRaises(FileNotFoundError):
+            f_732(self.test_dir, ['nonexistent.csv'], seed=42)
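Why f_732 is reproducible with a fixed seed: the same seed drives both the file choice and the row sample. A sketch under that assumption (illustrative only, not part of the patch; the file names are hypothetical):

# Illustrative sketch, not part of the patch.
import random
import pandas as pd

random.seed(42)
csv_files = ['file1.csv', 'file2.csv', 'file3.csv']  # hypothetical names
chosen = csv_files[random.randint(0, len(csv_files) - 1)]
df = pd.DataFrame({'x': range(10)})
sampled = df.sample(n=random.randint(1, len(df)), random_state=42)
print(chosen, len(sampled))  # identical on every run with seed 42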
diff --git a/data/processed/f_733_simon_chien_edit_wo_doc.py b/data/processed/f_733_simon_chien_edit_wo_doc.py
new file mode 100644
index 00000000..a5d34845
--- /dev/null
+++ b/data/processed/f_733_simon_chien_edit_wo_doc.py
@@ -0,0 +1,104 @@
+import pandas as pd
+from sklearn.linear_model import LinearRegression
+from sklearn.model_selection import train_test_split
+
+
+def f_733(csv_file_path, attribute, test_size=0.2, random_state=42):
+    """
+    Train a linear regression model on a dataset and predict the value of a particular attribute.
+    This function reads a CSV file to create a pandas DataFrame, separates the data into
+    training and testing sets, and performs linear regression. It returns the predicted
+    values for the testing set as well as the trained model.
+
+    Parameters:
+    csv_file_path (str): The path to the CSV file containing the data set.
+    attribute (str): The attribute to predict.
+    test_size (float, optional): Proportion of the dataset to include in the test split. Default is 0.2.
+    random_state (int, optional): Seed used by the random number generator. Default is 42.
+
+    Returns:
+    tuple: A tuple containing:
+        - model (LinearRegression): The trained linear regression model.
+        - predictions (ndarray): An array of predicted values for the test set.
+
+    Requirements:
+    - pandas
+    - sklearn.linear_model
+    - sklearn.model_selection
+
+    Example:
+    >>> model, predictions = f_733("/path/to/data.csv", "target")
+    >>> print(predictions)
+    [123.45, ..., 126.78]
+
+    >>> model, predictions = f_733("/path/to/test.csv", "target")
+    >>> print(predictions)
+    [1.2423, 4.2313, 28.2219, 10.3092]
+
+    Note: The function assumes that the CSV file is correctly formatted and that the specified attribute exists.
+    """
+    df = pd.read_csv(csv_file_path)
+    X = df.drop(columns=[attribute])
+    y = df[attribute]
+
+    X_train, X_test, y_train, y_test = train_test_split(
+        X, y, test_size=test_size, random_state=random_state
+    )
+
+    model = LinearRegression()
+    model.fit(X_train, y_train)
+
+    predictions = model.predict(X_test)
+    return model, predictions
+
+import unittest
+import numpy as np
+import pandas as pd
+import tempfile
+import os
+from sklearn.linear_model import LinearRegression
+class TestCases(unittest.TestCase):
+    def setUp(self):
+        # Create a temporary CSV file to simulate test environments
+        self.temp_file = tempfile.NamedTemporaryFile(mode='w+', delete=False, suffix='.csv')
+        self.csv_file_path = self.temp_file.name
+        self.temp_file.close()  # Close the file immediately after creation
+    def tearDown(self):
+        # Remove the temporary file after the test
+        os.unlink(self.csv_file_path)
+    def create_csv(self, data, header=True):
+        # Utility to create CSV content
+        df = pd.DataFrame(data)
+        df.to_csv(self.csv_file_path, index=False, header=header)
+    def test_valid_data(self):
+        # Valid CSV and attribute
+        data = {'feature1': [1, 2, 3], 'feature2': [4, 5, 6], 'target': [7, 8, 9]}
+        self.create_csv(data)
+        model, predictions = f_733(self.csv_file_path, "target")
+        self.assertIsInstance(model, LinearRegression)
+        self.assertIsInstance(predictions, np.ndarray)
+        self.assertEqual(len(predictions), 1)  # 20% of 3 rows rounds up to 1 test row
+    def test_different_test_size(self):
+        # Changing the test size
+        data = {'feature1': range(10), 'feature2': range(10, 20), 'target': range(20, 30)}
+        self.create_csv(data)
+        model, predictions = f_733(self.csv_file_path, "target", test_size=0.3)
+        self.assertEqual(len(predictions), 3)  # 30% of 10 is 3
+    def test_invalid_attribute(self):
+        # Attribute not present in the CSV
+        data = {'feature1': [1, 2], 'feature2': [3, 4]}
+        self.create_csv(data)
+        with self.assertRaises(KeyError):
+            f_733(self.csv_file_path, "nonexistent_target")
+    def test_csv_with_missing_values(self):
+        # CSV containing missing values in features
+        data = {'feature1': [1, np.nan, 3], 'feature2': [4, 5, 6], 'target': [7, 8, 9]}
+        self.create_csv(data)
+        with self.assertRaises(ValueError):
+            f_733(self.csv_file_path, "target")
+    def test_predicting_non_numerical_data(self):
+        # Non-numerical data in target
+        data = {'feature1': [1, 2, 3], 'feature2': [4, 5, 6], 'target': ['a', 'b', 'c']}
+        self.create_csv(data)
+        with self.assertRaises(ValueError):
+            f_733(self.csv_file_path, "target")
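The split-fit-predict pattern wrapped by f_733, on a tiny synthetic frame (illustrative only, not part of the patch); note that scikit-learn's LinearRegression raises ValueError on NaN input, which is what test_csv_with_missing_values exploits:

# Illustrative sketch, not part of the patch.
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split

df = pd.DataFrame({'feature1': range(10), 'target': range(20, 30)})
X_train, X_test, y_train, y_test = train_test_split(
    df[['feature1']], df['target'], test_size=0.2, random_state=42
)
model = LinearRegression().fit(X_train, y_train)
print(model.predict(X_test))  # two predictions: 20% of 10 rows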
From eb6a692e4034c7eb5fcfd013822443f17dcab3f4 Mon Sep 17 00:00:00 2001
From: vumichien
Date: Thu, 25 Apr 2024 21:01:01 +0900
Subject: [PATCH 3/3] remove test folder, updating the corresponding samples,
 remove some examples from each sample

---
 data/clean/f_657_simon_chien_edit.py          |    3 -
 data/clean/f_663_simon_chien_edit.py          |   26 -
 data/clean/f_674_simon_chien_edit.py          |    6 -
 data/clean/f_675_simon_chien_edit.py          |   11 -
 data/clean/f_694_simon_chien_edit.py          |   11 -
 data/clean/f_723_simon_chien_edit.py          |    9 -
 data/clean/f_724_simon_chien_edit.py          |    7 -
 data/clean/f_725_simon_chien_edit.py          |    8 -
 data/clean/f_728_simon_chien_edit.py          |   32 -
 data/clean/f_730_simon_chien_edit.py          |    8 -
 data/clean/f_731_simon_chien_edit.py          |    9 -
 data/clean/f_732_simon_chien_edit.py          |   16 -
 data/clean/f_733_simon_chien_edit.py          |    8 +-
 .../f_657_simon_chien_edit_wo_doc.py          |    3 -
 .../processed/f_663_simon_chien_edit_w_doc.py |   26 -
 .../f_674_simon_chien_edit_wo_doc.py          |    6 -
 .../f_675_simon_chien_edit_wo_doc.py          |   11 -
 .../f_694_simon_chien_edit_wo_doc.py          |   11 -
 .../f_723_simon_chien_edit_wo_doc.py          |    9 -
 .../f_724_simon_chien_edit_wo_doc.py          |    7 -
 .../processed/f_725_simon_chien_edit_w_doc.py |    8 -
 .../processed/f_728_simon_chien_edit_w_doc.py |   32 -
 .../f_730_simon_chien_edit_wo_doc.py          |    8 -
 .../f_731_simon_chien_edit_wo_doc.py          |    9 -
 .../f_732_simon_chien_edit_wo_doc.py          |   10 -
 .../f_733_simon_chien_edit_wo_doc.py          |    8 +-
 data/raw/f_657_data_simon/test_case_2/1.txt   |    5 -
 data/raw/f_657_data_simon/test_case_2/2.txt   |    6 -
 .../test_case_2/subfolder1/3.txt              |    4 -
 .../test_case_3/folder/folder/folder/test.txt |    4 -
 .../f_657_data_simon/test_case_4/file1.txt    |    1 -
 .../f_657_data_simon/test_case_4/sf/error.txt |    1 -
 .../f_657_data_simon/test_case_4/sf/file2.txt |    1 -
 .../test_case_5/many_error.txt                |    1 -
 data/raw/f_657_data_simon/test_case_6/1.txt   |    0
 data/raw/f_657_data_simon/test_case_6/2.png   |    0
 data/raw/f_657_data_simon/test_case_6/3.jpg   |    0
 data/raw/f_657_data_simon/test_case_6/4,py    |    0
 .../f_657_data_simon/test_case_6/folder/3.cpp |    1 -
 data/raw/f_657_simon.py                       |  117 --
 data/raw/f_657_simon_chien_edit.py            |  141 ++
 .../f_661_linear_regression_test.csv          |   11 -
 data/raw/f_661_data_simon/test_data.csv       |   19 -
 ...661_simon.py => f_661_simon_chien_edit.py} |  255 +--
 data/raw/f_663_data_simon/test_data1.csv      | 1001 ----------
 .../f_663_data_simon/test_data1_result.csv    |    6 -
 data/raw/f_663_data_simon/test_data2.csv      |  101 -
 .../f_663_data_simon/test_data2_result.csv    |    6 -
 data/raw/f_663_data_simon/test_data3.csv      |    5 -
 .../f_663_data_simon/test_data3_result.csv    |    4 -
 data/raw/f_663_simon.py                       |  171 --
 data/raw/f_663_simon_chien_edit.py            |  167 ++
 data/raw/f_674_data_simon/test_data_0.csv     |  101 -
 data/raw/f_674_data_simon/test_data_1.csv     |  101 -
 .../f_674_data_simon/test_data_1column.csv    |    2 -
 data/raw/f_674_data_simon/test_data_2.csv     |  101 -
 data/raw/f_674_data_simon/test_data_3.csv     |  101 -
 data/raw/f_674_data_simon/test_data_4.csv     |  101 -
 data/raw/f_674_data_simon/test_data_empty.csv |    1 -
 data/raw/f_674_simon.py                       |  144 --
 data/raw/f_674_simon_chien_edit.py            |  126 ++
 data/raw/f_675_data_simon/test_data_0.csv     |  101 -
 data/raw/f_675_data_simon/test_data_1.csv     |  101 -
 data/raw/f_675_data_simon/test_data_2.csv     |  101 -
 data/raw/f_675_data_simon/test_data_3.csv     |  101 -
 data/raw/f_675_data_simon/test_data_4.csv     |  101 -
 data/raw/f_675_data_simon/test_empty.csv      |    1 -
 .../raw/f_675_data_simon/test_non_numeric.csv |    3 -
 data/raw/f_675_data_simon/test_res_0.csv      |  101 -
 data/raw/f_675_data_simon/test_res_1.csv      |  101 -
 data/raw/f_675_data_simon/test_res_2.csv      |  101 -
 data/raw/f_675_data_simon/test_res_3.csv      |  101 -
 data/raw/f_675_data_simon/test_res_4.csv      |  101 -
 data/raw/f_675_data_simon/test_single.csv     |    2 -
 data/raw/f_675_simon.py                       |  153 --
 data/raw/f_675_simon_chien_edit.py            |  146 ++
 data/raw/f_694_data_simon/test_data_1.csv     |  401 ----
 data/raw/f_694_data_simon/test_data_2.csv     |  101 -
 data/raw/f_694_data_simon/test_data_3.csv     |    1 -
 data/raw/f_694_data_simon/test_data_4.csv     |   11 -
 data/raw/f_694_simon.py                       |  111 --
 data/raw/f_694_simon_chien_edit.py            |  105 +
 .../f_723_data_simon/complex_test_data.csv    | 1734 -----------------
 ...723_simon.py => f_723_simon_chien_edit.py} |  258 ++-
 data/raw/f_724_data_simon/empty.db            |    0
 data/raw/f_724_data_simon/sample.db           |  Bin 20480 -> 0 bytes
 data/raw/f_724_simon.py                       |  253 ---
 data/raw/f_724_simon_chien_edit.py            |  139 ++
 data/raw/f_725_data_simon/complex_data.csv    | 1001 ----------
 ...725_simon.py => f_725_simon_chien_edit.py} |  245 ++-
 data/raw/f_728_data_simon/data_large.csv      |  201 --
 data/raw/f_728_data_simon/data_medium.csv     |   51 -
 data/raw/f_728_data_simon/data_small.csv      |   11 -
 data/raw/f_728_data_simon/large_res.csv       |  201 --
 data/raw/f_728_data_simon/large_series.csv    |  143 --
 data/raw/f_728_data_simon/medium_res.csv      |   51 -
 data/raw/f_728_data_simon/medium_series.csv   |   45 -
 data/raw/f_728_simon.py                       |  208 --
 data/raw/f_728_simon_chien_edit.py            |  144 ++
 data/raw/f_730_data_simon/test_file1.csv      |    3 -
 data/raw/f_730_data_simon/test_file2.csv      |    3 -
 data/raw/f_730_data_simon/test_file3.csv      |    3 -
 data/raw/f_730_data_simon/test_file4.csv      |    3 -
 data/raw/f_730_simon.py                       |  126 --
 data/raw/f_730_simon_chien_edit.py            |  120 ++
 data/raw/f_731_data_simon/empty.csv           |    0
 data/raw/f_731_data_simon/mock_data1.csv      |    6 -
 data/raw/f_731_data_simon/mock_data2.csv      |    5 -
 data/raw/f_731_data_simon/strings.csv         |    5 -
 data/raw/f_731_simon.py                       |  113 --
 data/raw/f_731_simon_chien_edit.py            |  142 ++
 data/raw/f_732_data_simon/test_file1.csv      |   15 -
 data/raw/f_732_data_simon/test_file2.csv      |   12 -
 data/raw/f_732_data_simon/test_file3.csv      |   15 -
 data/raw/f_732_data_simon/test_file4.csv      |   13 -
 data/raw/f_732_data_simon/test_file5.csv      |   19 -
 data/raw/f_732_simon.py                       |  141 --
 data/raw/f_732_simon_chien_edit.py            |  133 ++
 data/raw/f_733_data_simon/mock_data.csv       |  101 -
 .../mock_data_with_non_numerical.csv          |   11 -
 data/raw/f_733_simon.py                       |  116 --
 data/raw/f_733_simon_chien_edit.py            |  122 ++
 122 files changed, 1860 insertions(+), 9205 deletions(-)
 delete mode 100644 data/raw/f_657_data_simon/test_case_2/1.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_2/2.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_2/subfolder1/3.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_3/folder/folder/folder/test.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_4/file1.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_4/sf/error.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_4/sf/file2.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_5/many_error.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_6/1.txt
 delete mode 100644 data/raw/f_657_data_simon/test_case_6/2.png
 delete mode 100644 data/raw/f_657_data_simon/test_case_6/3.jpg
 delete mode 100644 data/raw/f_657_data_simon/test_case_6/4,py
 delete mode 100644 data/raw/f_657_data_simon/test_case_6/folder/3.cpp
 delete mode 100644 data/raw/f_657_simon.py
 create mode 100644 data/raw/f_657_simon_chien_edit.py
 delete mode 100644 data/raw/f_661_data_simon/f_661_linear_regression_test.csv
 delete mode 100644 data/raw/f_661_data_simon/test_data.csv
 rename data/raw/{f_661_simon.py => f_661_simon_chien_edit.py} (75%)
 delete mode 100644 data/raw/f_663_data_simon/test_data1.csv
 delete mode 100644 data/raw/f_663_data_simon/test_data1_result.csv
 delete mode 100644 data/raw/f_663_data_simon/test_data2.csv
 delete mode 100644 data/raw/f_663_data_simon/test_data2_result.csv
 delete mode 100644 data/raw/f_663_data_simon/test_data3.csv
 delete mode 100644 data/raw/f_663_data_simon/test_data3_result.csv
 delete mode 100644 data/raw/f_663_simon.py
 create mode 100644 data/raw/f_663_simon_chien_edit.py
 delete mode 100644 data/raw/f_674_data_simon/test_data_0.csv
 delete mode 100644 data/raw/f_674_data_simon/test_data_1.csv
 delete mode 100644 data/raw/f_674_data_simon/test_data_1column.csv
 delete mode 100644 data/raw/f_674_data_simon/test_data_2.csv
 delete mode 100644 data/raw/f_674_data_simon/test_data_3.csv
 delete mode 100644 data/raw/f_674_data_simon/test_data_4.csv
 delete mode 100644 data/raw/f_674_data_simon/test_data_empty.csv
 delete mode 100644 data/raw/f_674_simon.py
 create mode 100644 data/raw/f_674_simon_chien_edit.py
 delete mode 100644 data/raw/f_675_data_simon/test_data_0.csv
 delete mode 100644 data/raw/f_675_data_simon/test_data_1.csv
 delete mode 100644 data/raw/f_675_data_simon/test_data_2.csv
 delete mode 100644 data/raw/f_675_data_simon/test_data_3.csv
 delete mode 100644 data/raw/f_675_data_simon/test_data_4.csv
 delete mode 100644 data/raw/f_675_data_simon/test_empty.csv
 delete mode 100644 data/raw/f_675_data_simon/test_non_numeric.csv
 delete mode 100644 data/raw/f_675_data_simon/test_res_0.csv
 delete mode 100644 data/raw/f_675_data_simon/test_res_1.csv
 delete mode 100644 data/raw/f_675_data_simon/test_res_2.csv
 delete mode 100644 data/raw/f_675_data_simon/test_res_3.csv
 delete mode 100644 data/raw/f_675_data_simon/test_res_4.csv
 delete mode 100644 data/raw/f_675_data_simon/test_single.csv
 delete mode 100644 data/raw/f_675_simon.py
 create mode 100644 data/raw/f_675_simon_chien_edit.py
 delete mode 100644 data/raw/f_694_data_simon/test_data_1.csv
 delete mode 100644 data/raw/f_694_data_simon/test_data_2.csv
 delete mode 100644 data/raw/f_694_data_simon/test_data_3.csv
 delete mode 100644 data/raw/f_694_data_simon/test_data_4.csv
 delete mode 100644 data/raw/f_694_simon.py
 create mode 100644 data/raw/f_694_simon_chien_edit.py
 delete mode 100644 data/raw/f_723_data_simon/complex_test_data.csv
 rename data/raw/{f_723_simon.py => f_723_simon_chien_edit.py} (54%)
 delete mode 100644 data/raw/f_724_data_simon/empty.db
 delete mode 100644 data/raw/f_724_data_simon/sample.db
 delete mode 100644 data/raw/f_724_simon.py
 create mode 100644 data/raw/f_724_simon_chien_edit.py
 delete mode 100644 data/raw/f_725_data_simon/complex_data.csv
 rename data/raw/{f_725_simon.py => f_725_simon_chien_edit.py} (52%)
 delete mode 100644 data/raw/f_728_data_simon/data_large.csv
 delete mode 100644 data/raw/f_728_data_simon/data_medium.csv
 delete mode 100644 data/raw/f_728_data_simon/data_small.csv
 delete mode 100644 data/raw/f_728_data_simon/large_res.csv
 delete mode 100644 data/raw/f_728_data_simon/large_series.csv
 delete mode 100644 data/raw/f_728_data_simon/medium_res.csv
 delete mode 100644 data/raw/f_728_data_simon/medium_series.csv
 delete mode 100644 data/raw/f_728_simon.py
 create mode 100644 data/raw/f_728_simon_chien_edit.py
 delete mode 100644 data/raw/f_730_data_simon/test_file1.csv
 delete mode 100644 data/raw/f_730_data_simon/test_file2.csv
 delete mode 100644 data/raw/f_730_data_simon/test_file3.csv
 delete mode 100644 data/raw/f_730_data_simon/test_file4.csv
 delete mode 100644 data/raw/f_730_simon.py
 create mode 100644 data/raw/f_730_simon_chien_edit.py
 delete mode 100644 data/raw/f_731_data_simon/empty.csv
 delete mode 100644 data/raw/f_731_data_simon/mock_data1.csv
 delete mode 100644 data/raw/f_731_data_simon/mock_data2.csv
 delete mode 100644 data/raw/f_731_data_simon/strings.csv
 delete mode 100644 data/raw/f_731_simon.py
 create mode 100644 data/raw/f_731_simon_chien_edit.py
 delete mode 100644 data/raw/f_732_data_simon/test_file1.csv
 delete mode 100644 data/raw/f_732_data_simon/test_file2.csv
 delete mode 100644 data/raw/f_732_data_simon/test_file3.csv
 delete mode 100644 data/raw/f_732_data_simon/test_file4.csv
 delete mode 100644 data/raw/f_732_data_simon/test_file5.csv
 delete mode 100644 data/raw/f_732_simon.py
 create mode 100644 data/raw/f_732_simon_chien_edit.py
 delete mode 100644 data/raw/f_733_data_simon/mock_data.csv
 delete mode 100644 data/raw/f_733_data_simon/mock_data_with_non_numerical.csv
 delete mode 100644 data/raw/f_733_simon.py
 create mode 100644 data/raw/f_733_simon_chien_edit.py

diff --git a/data/clean/f_657_simon_chien_edit.py b/data/clean/f_657_simon_chien_edit.py
index 1110b032..ea6c685b 100644
--- a/data/clean/f_657_simon_chien_edit.py
+++ b/data/clean/f_657_simon_chien_edit.py
@@ -30,9 +30,6 @@ def f_657(dir_path):
     Example:
     >>> f_657("/path/to/directory")
     {'file1.txt': 2, 'subdir/file2.txt': 1}
-
-    >>> f_657("/path/to/directory")
-    {'test.txt': 245, 'subdir/test2.txt': 0, 'subdir/sf/test3.txt': 1}
     """
 
     if not os.path.isdir(dir_path):
diff --git a/data/clean/f_663_simon_chien_edit.py b/data/clean/f_663_simon_chien_edit.py
index c7ff2f53..ed1e96ec 100644
--- a/data/clean/f_663_simon_chien_edit.py
+++ b/data/clean/f_663_simon_chien_edit.py
@@ -38,32 +38,6 @@ def f_663(articles, timezone):
     Health       1   3.0    3    3
     Sports       1  19.0   19   19
     Technology   1   8.0    8    8
-
-    >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}]
-    >>> analysis_df = f_663(articles, 'America/New_York')
-    >>> print(analysis_df)
-                count  mean  min  max
-    category
-    Health          1   3.0    3    3
-    Sports          1  19.0   19   19
-    Technology      1   8.0    8    8
-
-    >>> articles = [
-    ...    {'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': '09:01:04.403278+00:00'},
-    ...    {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': '02:03:04.403278+00:00'},
-    ...    {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': '21:11:01.403278+00:00'},
-    ...    {'title': 'newsies', 'title_url': 'newsies.news', 'id': 21, 'category': 'Technology', 'published_time': '4:25:12.403278+00:00'},
-    ...    {'title': 'ORF', 'title_url': 'orf.at', 'id': 44, 'category': 'Health', 'published_time': '03:04:03.403278+00:00'},
-    ...    {'title': 'ARD', 'title_url': 'ard.com', 'id': 61, 'category': 'Health', 'published_time': '11:41:12.403278+00:00'}]
-    >>> analysis_df = f_663(articles, 'America/New_York')
-    >>> print(analysis_df)
-                count       mean  min  max
-    category
-    Health          3  15.666667    7   23
-    Sports          1  22.000000   22   22
-    Technology      2   2.500000    0    5
     """
 
     if not isinstance(articles, list):
diff --git a/data/clean/f_674_simon_chien_edit.py b/data/clean/f_674_simon_chien_edit.py
index 02d37501..808e5cd8 100644
--- a/data/clean/f_674_simon_chien_edit.py
+++ b/data/clean/f_674_simon_chien_edit.py
@@ -29,12 +29,6 @@ def f_674(file_name):
     'Fruit': 'Apple',
     'Genre': 'HipHop',
     'Height': 172}
-
-    >>> common_values = f_674('test.csv')
-    >>> print(common_values)
-    {'Object': 'Chair',
-    'Weight': '211kg',
-    'Dancing Style': 'Waltz',}
     """
 
     data = np.genfromtxt(file_name, delimiter=',', names=True, dtype=None, encoding=None)
diff --git a/data/clean/f_675_simon_chien_edit.py b/data/clean/f_675_simon_chien_edit.py
index cb709a6b..b948ac5c 100644
--- a/data/clean/f_675_simon_chien_edit.py
+++ b/data/clean/f_675_simon_chien_edit.py
@@ -30,17 +30,6 @@ def f_675(file_name: str) -> pd.DataFrame:
     2  Anthony George  0.996744  0.216552
     3  Brian Washington  0.126279  0.459948
     4  Elias Lawrence  0.337239  0.124185
-
-    >>> normalized_data = f_675("test.csv")
-    >>> print(normalized_data.head())
-
-    Fruit Weight Amount
-    0 Aplple 1 0.5
-    1 Mr. Leslie Casey 0.32140 0.998905
-    2 Anthony George 0.8998888 0.123784
-    3 Brian Washington 0.121222 0.445321
-    4 Elias Lawrence 0.345223 0
-
     """
     df = pd.read_csv(file_name)
     if df.select_dtypes(include='number').empty:
diff --git a/data/clean/f_694_simon_chien_edit.py b/data/clean/f_694_simon_chien_edit.py
index 82d275fd..ffe6cd30 100644
--- a/data/clean/f_694_simon_chien_edit.py
+++ b/data/clean/f_694_simon_chien_edit.py
@@ -26,17 +26,6 @@ def f_694(file_path, num_rows, data_dimensions=5, random_seed=None):
     Example:
     >>> f_694('/tmp/data.csv', 100)
     '/tmp/data.csv'
-
-    >>> f_694('test.csv', 5, 2, random_seed=42)
-    'test.csv'
-    >>> pd.read_csv('test.csv')
-       Feature_1  Feature_2
-    0   0.154163   0.740050
-    1   0.918747   0.900715
-    2   0.283828   0.606083
-    3   0.521226   0.552038
-    4   0.764560   0.020810
-
     """
     np.random.seed(random_seed)
     df = pd.DataFrame(np.random.rand(num_rows, data_dimensions),
diff --git a/data/clean/f_723_simon_chien_edit.py b/data/clean/f_723_simon_chien_edit.py
index 12d176d7..8eb40609 100644
--- a/data/clean/f_723_simon_chien_edit.py
+++ b/data/clean/f_723_simon_chien_edit.py
@@ -40,15 +40,6 @@ def f_723(csv_file, column_name='data', pattern='\d+[xX]', sample_size=None, see
     165    166  Main hotel production nothing.\r\nCoach voice ...
     810    811  Early right nature technology. Conference mind...
     60      61  Interest require gas wall. Different it see fi...
-
-    >>> result = f_723('sample.csv', column_name='data', sample_size=2)
-    >>> print(result)
-        index                                               data
-    125    126  Fund elephenat, the dinoasuar eat this language t...
-    21      22  Such an important story banking at the house a da...
-
-
-
     """
     df = pd.read_csv(csv_file)
     matches = df[df[column_name].str.contains(pattern, na=False)]
diff --git a/data/clean/f_724_simon_chien_edit.py b/data/clean/f_724_simon_chien_edit.py
index b99369c3..fede53cc 100644
--- a/data/clean/f_724_simon_chien_edit.py
+++ b/data/clean/f_724_simon_chien_edit.py
@@ -42,13 +42,6 @@ def f_724(db_file, table_name, column_name, pattern='\d+[xX]'):
     59   60  3xanswer
     60   61    5xgirl
     61   62    5xkind
-
-    >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column', pattern='kind')
-    >>> print(result)
-        id                                        test_column
-    20  21  To between successful ever ago PM toward today...
-    42  43  Entire manage wife management perform size def...
-    61  62                                             5xkind
     """
 
     if not os.path.isfile(db_file):
diff --git a/data/clean/f_725_simon_chien_edit.py b/data/clean/f_725_simon_chien_edit.py
index aa38c947..301e2daa 100644
--- a/data/clean/f_725_simon_chien_edit.py
+++ b/data/clean/f_725_simon_chien_edit.py
@@ -38,14 +38,6 @@ def f_725(df: pd.DataFrame, column_a: str, column_b: str, column_c: str) -> bool
     ... })
     >>> f_725(df, 'A', 'B', 'C')
     False
-
-    >>> df = pd.DataFrame({
-    ...     'TempA': [],
-    ...     'TempB': [],
-    ...     'TempC': []
-    ... })
-    >>> f_725(df, 'TempA', 'TempB', 'TempC')
-    True
     """
     # Filter rows based on column_b and column_c
     filtered_df = df[(df[column_b] > 50) & (df[column_c] == 900)]
diff --git a/data/clean/f_728_simon_chien_edit.py b/data/clean/f_728_simon_chien_edit.py
index e90e9ffb..cf744212 100644
--- a/data/clean/f_728_simon_chien_edit.py
+++ b/data/clean/f_728_simon_chien_edit.py
@@ -45,38 +45,6 @@ def f_728(data):
     3    Tom   20     88
     7    Tom   20     86
     9    Tom   20     85
-
-    >>> print(avg_scores)
-    Name
-    John    91.0
-    Nick    80.0
-    Tom     86.0
-    Name: Score, dtype: float64
-    >>> print(common_age)
-    19
-
-    >>> data = {
-    ...     'Name': ['Simon', 'Alex', 'Tanja', 'Amanda', 'Tanja'],
-    ...     'Age': [21, 42, 54, 20, 54],
-    ...     'Score': [1, 1, 2, 3, 5]
-    ... }
-    >>> df, avg_scores, common_age = f_728(data)
-    >>> print(df)
-         Name  Age  Score
-    1    Alex   42      1
-    3  Amanda   20      3
-    0   Simon   21      1
-    2   Tanja   54      2
-    4   Tanja   54      5
-    >>> print(avg_scores)
-    Name
-    Alex      1.0
-    Amanda    3.0
-    Simon     1.0
-    Tanja     3.5
-    Name: Score, dtype: float64
-    >>> print(common_age)
-    54
     """
 
     if not all(key in data for key in ['Name', 'Age', 'Score']):
diff --git a/data/clean/f_730_simon_chien_edit.py b/data/clean/f_730_simon_chien_edit.py
index 57c2ed19..d5c68111 100644
--- a/data/clean/f_730_simon_chien_edit.py
+++ b/data/clean/f_730_simon_chien_edit.py
@@ -28,14 +28,6 @@ def f_730(data_dir: str, csv_files: list) -> pd.DataFrame:
     0  Elena   13  Female
     1    Tom   23    Male
     0 Franko   12    Male
-
-    >>> df = f_730('/path/to/data/directory', ['file1.csv', 'other_file.csv'])
-    >>> print(df.head())
-       Name   Age  Gender  Animal  Size
-    0  Simon   5    Male    None    None
-    1  Bobby   32   Male    None    None
-    0  Elena   13   Female  None    None
-    2  None    None None    Tiger   12
     """
     merged_df = pd.DataFrame()
 
diff --git a/data/clean/f_731_simon_chien_edit.py b/data/clean/f_731_simon_chien_edit.py
index 371f48c9..b866cf25 100644
--- a/data/clean/f_731_simon_chien_edit.py
+++ b/data/clean/f_731_simon_chien_edit.py
@@ -34,15 +34,6 @@ def f_731(data_dir: str, csv_file: str) -> pd.DataFrame:
     1   Orange    NaN      2
     2  Avocado    Bad  1.667
     3  Coconut  Tasty      2
-
-    >>> df = f_731("/path/to/data/directory", "test.csv")
-    >>> print(df)
-         Name   Score
-    0    Alex    25.2
-    1   Tanja    31.5
-    2   Maine      99
-    3    Lisa     100
-    4  Simone  63.925
     """
     file_path = os.path.join(data_dir, csv_file)
     try:
diff --git a/data/clean/f_732_simon_chien_edit.py b/data/clean/f_732_simon_chien_edit.py
index 71a5bb53..b8f1be1e 100644
--- a/data/clean/f_732_simon_chien_edit.py
+++ b/data/clean/f_732_simon_chien_edit.py
@@ -33,22 +33,6 @@ def f_732(data_dir,
     >>> file_name, df = f_732('test_data')
     >>> print(file_name)
     'file2.csv'
-    >>> print(df)
-          Animal  Weight
-    0        Cat       1
-    21     Mouse      12
-    15  Elephant    1000
-    2      Tiger     500
-
-    >>> file_name, df = f_732('data', csv_files=['test1.csv', 'test2.csv'], seed=42)
-    >>> print(file_name)
-    'test1.csv'
-    >>> print(df)
-           Name    House  Salary
-    12    Simba  mansion   11111
-    231 Dolores  mansion    2222
-    135  Elaine     shed   93274
-    21   Sophia   garden     111
     """
 
     random.seed(seed)
diff --git a/data/clean/f_733_simon_chien_edit.py b/data/clean/f_733_simon_chien_edit.py
index 6a0f3406..274c0103 100644
--- a/data/clean/f_733_simon_chien_edit.py
+++ b/data/clean/f_733_simon_chien_edit.py
@@ -26,16 +26,12 @@ def f_733(csv_file_path, attribute, test_size=0.2, random_state=42):
     - sklearn.linear_model
     - sklearn.model_selection
 
+    Note: The function assumes that the CSV file is correctly formatted and that the specified attribute exists.
+
     Example:
     >>> model, predictions = f_733("/path/to/data.csv", "target")
     >>> print(predictions)
     [123.45, ..., 126.78]
-
-    >>> model, predictions = f_733("/path/to/test.csv", "target")
-    >>> print(predictions)
-    [1.2423, 4.2313, 28.2219, 10.3092]
-
-    Note: The function assumes that the CSV file is correctly formatted and that the specified attribute exists.
     """
     df = pd.read_csv(csv_file_path)
     X = df.drop(columns=[attribute])
diff --git a/data/processed/f_657_simon_chien_edit_wo_doc.py b/data/processed/f_657_simon_chien_edit_wo_doc.py
index b476c896..ade09eaf 100644
--- a/data/processed/f_657_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_657_simon_chien_edit_wo_doc.py
@@ -30,9 +30,6 @@ def f_657(dir_path):
     Example:
     >>> f_657("/path/to/directory")
     {'file1.txt': 2, 'subdir/file2.txt': 1}
-
-    >>> f_657("/path/to/directory")
-    {'test.txt': 245, 'subdir/test2.txt': 0, 'subdir/sf/test3.txt': 1}
     """
 
     if not os.path.isdir(dir_path):
diff --git a/data/processed/f_663_simon_chien_edit_w_doc.py b/data/processed/f_663_simon_chien_edit_w_doc.py
index 1550b6d2..32155fe3 100644
--- a/data/processed/f_663_simon_chien_edit_w_doc.py
+++ b/data/processed/f_663_simon_chien_edit_w_doc.py
@@ -38,32 +38,6 @@ def f_663(articles, timezone):
     Health       1   3.0    3    3
     Sports       1  19.0   19   19
     Technology   1   8.0    8    8
-
-    >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}]
-    >>> analysis_df = f_663(articles, 'America/New_York')
-    >>> print(analysis_df)
-                count  mean  min  max
-    category
-    Health          1   3.0    3    3
-    Sports          1  19.0   19   19
-    Technology      1   8.0    8    8
-
-    >>> articles = [
-    ...    {'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': '09:01:04.403278+00:00'},
-    ...    {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': '02:03:04.403278+00:00'},
-    ...    {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': '21:11:01.403278+00:00'},
-    ...    {'title': 'newsies', 'title_url': 'newsies.news', 'id': 21, 'category': 'Technology', 'published_time': '4:25:12.403278+00:00'},
-    ...    {'title': 'ORF', 'title_url': 'orf.at', 'id': 44, 'category': 'Health', 'published_time': '03:04:03.403278+00:00'},
-    ...    {'title': 'ARD', 'title_url': 'ard.com', 'id': 61, 'category': 'Health', 'published_time': '11:41:12.403278+00:00'}]
-    >>> analysis_df = f_663(articles, 'America/New_York')
-    >>> print(analysis_df)
-                count       mean  min  max
-    category
-    Health          3  15.666667    7   23
-    Sports          1  22.000000   22   22
-    Technology      2   2.500000    0    5
     """
 
     if not isinstance(articles, list):
diff --git a/data/processed/f_674_simon_chien_edit_wo_doc.py b/data/processed/f_674_simon_chien_edit_wo_doc.py
index 071db061..b47ca67c 100644
--- a/data/processed/f_674_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_674_simon_chien_edit_wo_doc.py
@@ -29,12 +29,6 @@ def f_674(file_name):
     'Fruit': 'Apple',
     'Genre': 'HipHop',
     'Height': 172}
-
-    >>> common_values = f_674('test.csv')
-    >>> print(common_values)
-    {'Object': 'Chair',
-    'Weight': '211kg',
-    'Dancing Style': 'Waltz',}
     """
 
    data = np.genfromtxt(file_name, delimiter=',', names=True, dtype=None, encoding=None)
diff --git a/data/processed/f_675_simon_chien_edit_wo_doc.py b/data/processed/f_675_simon_chien_edit_wo_doc.py
index 99c68030..cf10e706 100644
--- a/data/processed/f_675_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_675_simon_chien_edit_wo_doc.py
@@ -30,17 +30,6 @@ def f_675(file_name: str) -> pd.DataFrame:
     2  Anthony George  0.996744  0.216552
     3  Brian Washington  0.126279  0.459948
     4  Elias Lawrence  0.337239  0.124185
-
-    >>> normalized_data = f_675("test.csv")
-    >>> print(normalized_data.head())
-
-    Fruit Weight Amount
-    0 Aplple 1 0.5
-    1 Mr. Leslie Casey 0.32140 0.998905
-    2 Anthony George 0.8998888 0.123784
-    3 Brian Washington 0.121222 0.445321
-    4 Elias Lawrence 0.345223 0
-
     """
     df = pd.read_csv(file_name)
     if df.select_dtypes(include='number').empty:
diff --git a/data/processed/f_694_simon_chien_edit_wo_doc.py b/data/processed/f_694_simon_chien_edit_wo_doc.py
index 903b4d96..f518ff50 100644
--- a/data/processed/f_694_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_694_simon_chien_edit_wo_doc.py
@@ -26,17 +26,6 @@ def f_694(file_path, num_rows, data_dimensions=5, random_seed=None):
     Example:
     >>> f_694('/tmp/data.csv', 100)
     '/tmp/data.csv'
-
-    >>> f_694('test.csv', 5, 2, random_seed=42)
-    'test.csv'
-    >>> pd.read_csv('test.csv')
-       Feature_1  Feature_2
-    0   0.154163   0.740050
-    1   0.918747   0.900715
-    2   0.283828   0.606083
-    3   0.521226   0.552038
-    4   0.764560   0.020810
-
     """
     np.random.seed(random_seed)
     df = pd.DataFrame(np.random.rand(num_rows, data_dimensions),
diff --git a/data/processed/f_723_simon_chien_edit_wo_doc.py b/data/processed/f_723_simon_chien_edit_wo_doc.py
index 0e5d15e5..88a8aa0f 100644
--- a/data/processed/f_723_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_723_simon_chien_edit_wo_doc.py
@@ -40,15 +40,6 @@ def f_723(csv_file, column_name='data', pattern='\d+[xX]', sample_size=None, see
     165    166  Main hotel production nothing.\r\nCoach voice ...
     810    811  Early right nature technology. Conference mind...
     60      61  Interest require gas wall. Different it see fi...
-
-    >>> result = f_723('sample.csv', column_name='data', sample_size=2)
-    >>> print(result)
-        index                                               data
-    125    126  Fund elephenat, the dinoasuar eat this language t...
-    21      22  Such an important story banking at the house a da...
-
-
-
     """
     df = pd.read_csv(csv_file)
     matches = df[df[column_name].str.contains(pattern, na=False)]
diff --git a/data/processed/f_724_simon_chien_edit_wo_doc.py b/data/processed/f_724_simon_chien_edit_wo_doc.py
index 2d89b5e1..ce3625f7 100644
--- a/data/processed/f_724_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_724_simon_chien_edit_wo_doc.py
@@ -42,13 +42,6 @@ def f_724(db_file, table_name, column_name, pattern='\d+[xX]'):
     59   60  3xanswer
     60   61    5xgirl
     61   62    5xkind
-
-    >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column', pattern='kind')
-    >>> print(result)
-        id                                        test_column
-    20  21  To between successful ever ago PM toward today...
-    42  43  Entire manage wife management perform size def...
-    61  62                                             5xkind
     """
 
     if not os.path.isfile(db_file):
diff --git a/data/processed/f_725_simon_chien_edit_w_doc.py b/data/processed/f_725_simon_chien_edit_w_doc.py
index 257f3d86..9280adb1 100644
--- a/data/processed/f_725_simon_chien_edit_w_doc.py
+++ b/data/processed/f_725_simon_chien_edit_w_doc.py
@@ -38,14 +38,6 @@ def f_725(df: pd.DataFrame, column_a: str, column_b: str, column_c: str) -> bool
     ... })
     >>> f_725(df, 'A', 'B', 'C')
     False
-
-    >>> df = pd.DataFrame({
-    ...     'TempA': [],
-    ...     'TempB': [],
-    ...     'TempC': []
-    ... })
-    >>> f_725(df, 'TempA', 'TempB', 'TempC')
-    True
     """
     # Filter rows based on column_b and column_c
     filtered_df = df[(df[column_b] > 50) & (df[column_c] == 900)]
diff --git a/data/processed/f_728_simon_chien_edit_w_doc.py b/data/processed/f_728_simon_chien_edit_w_doc.py
index b63b9bef..f32d76b5 100644
--- a/data/processed/f_728_simon_chien_edit_w_doc.py
+++ b/data/processed/f_728_simon_chien_edit_w_doc.py
@@ -45,38 +45,6 @@ def f_728(data):
     3    Tom   20     88
     7    Tom   20     86
     9    Tom   20     85
-
-    >>> print(avg_scores)
-    Name
-    John    91.0
-    Nick    80.0
-    Tom     86.0
-    Name: Score, dtype: float64
-    >>> print(common_age)
-    19
-
-    >>> data = {
-    ...     'Name': ['Simon', 'Alex', 'Tanja', 'Amanda', 'Tanja'],
-    ...     'Age': [21, 42, 54, 20, 54],
-    ...     'Score': [1, 1, 2, 3, 5]
-    ... }
-    >>> df, avg_scores, common_age = f_728(data)
-    >>> print(df)
-         Name  Age  Score
-    1    Alex   42      1
-    3  Amanda   20      3
-    0   Simon   21      1
-    2   Tanja   54      2
-    4   Tanja   54      5
-    >>> print(avg_scores)
-    Name
-    Alex      1.0
-    Amanda    3.0
-    Simon     1.0
-    Tanja     3.5
-    Name: Score, dtype: float64
-    >>> print(common_age)
-    54
     """
 
     if not all(key in data for key in ['Name', 'Age', 'Score']):
diff --git a/data/processed/f_730_simon_chien_edit_wo_doc.py b/data/processed/f_730_simon_chien_edit_wo_doc.py
index ddf0b410..aa9ed05c 100644
--- a/data/processed/f_730_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_730_simon_chien_edit_wo_doc.py
@@ -28,14 +28,6 @@ def f_730(data_dir: str, csv_files: list) -> pd.DataFrame:
     2   Elena   13  Female
     3     Tom   23    Male
     4  Franko   12    Male
-
-    >>> df = f_730('/path/to/data/directory', ['file1.csv', 'other_file.csv'])
-    >>> print(df.head())
-        Name   Age  Gender Animal  Size
-    0  Simon   5.0    Male    NaN   NaN
-    1  Bobby  32.0    Male    NaN   NaN
-    2  Elena  13.0  Female    NaN   NaN
-    3    NaN   NaN     NaN  Tiger  12.0
     """
     merged_df = pd.DataFrame()
 
diff --git a/data/processed/f_731_simon_chien_edit_wo_doc.py b/data/processed/f_731_simon_chien_edit_wo_doc.py
index 79d35027..492b395c 100644
--- a/data/processed/f_731_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_731_simon_chien_edit_wo_doc.py
@@ -34,15 +34,6 @@ def f_731(data_dir: str, csv_file: str) -> pd.DataFrame:
     1   Orange    NaN      2
     2  Avocado    Bad  1.667
     3  Coconut  Tasty      2
-
-    >>> df = f_731("/path/to/data/directory", "test.csv")
-    >>> print(df)
-         Name   Score
-    0    Alex    25.2
-    1   Tanja    31.5
-    2   Maine      99
-    3    Lisa     100
-    4  Simone  63.925
     """
     file_path = os.path.join(data_dir, csv_file)
     try:
diff --git a/data/processed/f_732_simon_chien_edit_wo_doc.py b/data/processed/f_732_simon_chien_edit_wo_doc.py
index 12f72773..11e3bf9e 100644
--- a/data/processed/f_732_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_732_simon_chien_edit_wo_doc.py
@@ -39,16 +39,6 @@ def f_732(data_dir,
     21     Mouse      12
     15  Elephant    1000
     2      Tiger     500
-
-    >>> file_name, df = f_732('data', csv_files=['test1.csv', 'test2.csv'], seed=42)
-    >>> print(file_name)
-    test1.csv
-    >>> print(df)
-           Name    House  Salary
-    12    Simba  mansion   11111
-    231 Dolores  mansion    2222
-    135  Elaine     shed   93274
-    21   Sophia   garden     111
     """
 
     random.seed(seed)
diff --git a/data/processed/f_733_simon_chien_edit_wo_doc.py b/data/processed/f_733_simon_chien_edit_wo_doc.py
index a5d34845..541010c7 100644
--- a/data/processed/f_733_simon_chien_edit_wo_doc.py
+++ b/data/processed/f_733_simon_chien_edit_wo_doc.py
@@ -26,16 +26,12 @@ def f_733(csv_file_path, attribute, test_size=0.2, random_state=42):
     - sklearn.linear_model
     - sklearn.model_selection
 
+    Note: The function assumes that the CSV file is correctly formatted and that the specified attribute exists.
+
     Example:
     >>> model, predictions = f_733("/path/to/data.csv", "target")
     >>> print(predictions)
     [123.45, ..., 126.78]
-
-    >>> model, predictions = f_733("/path/to/test.csv", "target")
-    >>> print(predictions)
-    [1.2423, 4.2313, 28.2219, 10.3092]
     """
     df = pd.read_csv(csv_file_path)
     X = df.drop(columns=[attribute])
diff --git a/data/raw/f_657_data_simon/test_case_2/1.txt b/data/raw/f_657_data_simon/test_case_2/1.txt
deleted file mode 100644
index 829cc563..00000000
--- a/data/raw/f_657_data_simon/test_case_2/1.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-;lkjasd;lkfjs;ldkferrorjk
-klkjsdf ERROR lkjasfd!!!
-
-err jlkj o re
-error
\ No newline at end of file
diff --git a/data/raw/f_657_data_simon/test_case_2/2.txt b/data/raw/f_657_data_simon/test_case_2/2.txt
deleted file mode 100644
index dfb1b07b..00000000
--- a/data/raw/f_657_data_simon/test_case_2/2.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-asdf
-3iuh3873
-asdfasdf
-4i8y498jh4m4
-oh48h4
-oijhsaferroroinaoisjdf
\ No newline at end of file
diff --git a/data/raw/f_657_data_simon/test_case_2/subfolder1/3.txt b/data/raw/f_657_data_simon/test_case_2/subfolder1/3.txt
deleted file mode 100644
index 8ed89da3..00000000
--- a/data/raw/f_657_data_simon/test_case_2/subfolder1/3.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-ERROR
-Erroor
-Error
-ErRoR
diff --git a/data/raw/f_657_data_simon/test_case_3/folder/folder/folder/test.txt b/data/raw/f_657_data_simon/test_case_3/folder/folder/folder/test.txt
deleted file mode 100644
index 65fbeb82..00000000
--- a/data/raw/f_657_data_simon/test_case_3/folder/folder/folder/test.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-hi
-how are you?
-ERRRRRORRRRRR
-error
\ No newline at end of file
diff --git a/data/raw/f_657_data_simon/test_case_4/file1.txt b/data/raw/f_657_data_simon/test_case_4/file1.txt
deleted file mode 100644
index 21bcf248..00000000
--- a/data/raw/f_657_data_simon/test_case_4/file1.txt
+++ /dev/null
@@ -1 +0,0 @@
-Kind miss artist truth trouble behavior style. Ability management test during foot that course nothing. Sound central myself before year. Your majority feeling fact by four two. White owner onto knowledge other. First drug contain start almost wonder. Live bed serious theory type. Together type music hospital. Every speech support time operation wear often. Manage political record word group food break. Picture suddenly drug rule bring determine some forward. Beyond chair recently and. Plant view own available buy country store. Hospital have wonder already. Create wife responsibility. Decision song view age international big employee. Author feeling job article level. Beyond write current grow rule stuff truth college. Whom around put suddenly garden. Bring TV program actually race. Themselves true power home price check real. Score from animal exactly drive well good. Pull opportunity throughout take car. Hold increase practice ability court. Civil development large report purpose themselves. I reduce industry. Close ask reduce. Month maintain no sense this manager fine. Anyone state wind indeed nature white without. Would I question first. Add senior woman put partner. Budget power them evidence without. Little type physical against. Son break either president stage population boy. Everything affect American race. Fire happen nothing support suffer which parent. Republican total policy head Mrs debate onto. Catch even front. Responsibility full per among clearly word. Guess writer can boy room. Film tax rock. Idea foreign increase between training. EE Rr or Front something speech buy break marriage also. Same thus turn phone heart window. Assume be seek article. Better news face. Small research describe base detail yourself one. Issue grow ask tell. Total later quality. Market either political young. Perhaps bit learn gun still. Writer work chance image.errOr There many true follow marriage material. Myself use act relationship section. Stand along chance either six success. Family relationship like son might trip at candidate. Address such former claim chance. Why measure too maybe off question source. Wrong section town deal movement out stay lot. Parent do ten after those scientist.
Medical effort assume teacher wall. Significant his himself clearly very. Expert stop area along individual. Three own bank recognize special good along. Hit another likely character allow pay picture. Record power crime situation. Book art red pass value practice wide. Fast support when hold family second whatever work. Million size country site. He couple ground place what top. Main too war. Occur democratic behavior standard thousand single recognize. Medical watch certainly through instead base. ERROR Indeed between similar safe. Social issue indicate. Try while reveal bad audience grow ahead. Concern store discover hand others century. Daughter purpose voice.
\ No newline at end of file
diff --git a/data/raw/f_657_data_simon/test_case_4/sf/error.txt b/data/raw/f_657_data_simon/test_case_4/sf/error.txt
deleted file mode 100644
index 3f97a122..00000000
--- a/data/raw/f_657_data_simon/test_case_4/sf/error.txt
+++ /dev/null
@@ -1 +0,0 @@
-'But go computer pattern. ERROR ERROR Follow so second. Live teach movie I situation understand agree. Believe step four western likely almost training. Source where a front war receive civil single. Policy drug these person. Lot thousand question. Difficult player forget. Goal clear inside guy north. North add us accept hope soon. Affect station member although. Of challenge check skin. ERRORIndustry nothing rather. Two hair describe hundred candidate. Probably whom it job likely different house. However ok structure your those head against. Single theory everybody particularly test hospital personal move. Certain hotel should. Item bit leave left college listen. Opportunity you exist me. Generation pick she picture send cultural. Computer on fast play fact. ERROR Alone improve heart decade care book. Close issue huge away represent race. ERROR Nation network college debate direction moment. Ground think save respond friend budget while. Significant energy citizen physical why mouth. Treatment worker season figure. Get we memory who yet spend. Challenge on plant wonder. Blood single recently other owner message entire. Collection be along break gun reveal five put. Once effect main simply two no. Sister meet though ago. These military part decade. Enough onto try commercial kid approach. Upon beautiful open free medical lay. Per environmental medical nearly hour enter discover. City system story century attention attention. Not improve begin lot sport father. Speech catch ten statement carry next. Almost identify record. Make real use nice themselves gas best above. Marriage worker ever across great. Unit begin fight ERROR institution school white team. Discuss very score loss. Executive partner story budget great than. Prevent hear trouble it grow. Should research executive black tough building. General during cost what. Director race wish let left very your. Determine human find discussion military ability line hour. ERROR. Star issue any side image. Customer force both something hair. Well account movement can start. Various current scientist culture ability to suffer at. Unit dinner indeed listen. Job high she. West agreement girl happen front law care military. War resource evening realize per. Her of process cut. Matter nice really listen meeting language condition. Fund indicate help north friend state do. Go simple improve language thousand. If share land involve education little direction. Growth main use chance story. Could usually piece but. Choice already thank source. Hope could many cold another way sign. Thought radio minute rich consumer type. Old charge read management production up our allow. Toward such new cultural analysis care kitchen. Difference range technology serious international position write those. Management court head successful response hospital. Suffer life chair senior seem west. Week remain decide happy another image. Take one them in. Plant whether protect most trade. Herself Mr bad wide manage. Daughter pay growth. Seat should dream whose. Onto when happen. Election order everything stuff detail himself medical. Direction opportunity very analysis firm once. As year look floor role agency its. Seven those budget involve. Picture enough break. Health sort couple station my. Middle add traditional standard PM election. General yet party individual. Edge bit avoid yourself. Magazine minute site girl. Student describe concern professional discover cut recognize. Give especially win give yourself system. Letter finish campaign night simply. Bar standard final. Along particularly factor boy Republican without. Never ten become me story but order. You company drop join recognize all structure. Away wish finish within president. Any remain support realize cause. Argue home exactly different week treat mouth threat. Interest college anything relationship. Himself draw final station tough whether address. Moment buy east example identify likely. Clearly thought modern dark. Determine feeling certainly necessary. Name Congress ten return understand. During nice thing ok. Seat model similar sometimes. Reveal so suggest. Include herself hold his. Manage camera beat activity stay bank which. Second late every include generation. Election baby seven only scientist teach. Memory attack land. Report to product outside they eye out. Project happen white maybe read card. Southern very talk part fund feeling cause. Visit red forward table owner. Official receive may push serious successful. Issue enter cold cell our. Sing media step live game item lay. Appear role why learn attention cut billion. Without wall stay describe time. Community whole growth speech across yard admit parent.'
\ No newline at end of file
diff --git a/data/raw/f_657_data_simon/test_case_4/sf/file2.txt b/data/raw/f_657_data_simon/test_case_4/sf/file2.txt
deleted file mode 100644
index 00a8735f..00000000
--- a/data/raw/f_657_data_simon/test_case_4/sf/file2.txt
+++ /dev/null
@@ -1 +0,0 @@
-Fall ready usually. Teacher cost both general where. Agreement decade friend which. View when player contain year. Mouth film heavy chair. Source firm drug senior. Head production technology over hour. Car federal indicate unit. Instead PM be know hard we around impact. While top kid he weight before. Someone everybody newspaper read. Up control instead company where future model. Leg PM low data ability recognize. Left despite boy without feeling participant interest seem. Question set discussion seven. Place again establish protect a. Moment ok how bill education exist. Other along society figure future. Teacher three seven attention team executive care. Phone most improve play idea sing small. Kind nothing case but building. Opportunity cause property government line indeed. Major maybe manage when know central many. Democratic green hospital year suffer without rather bank. According American per yourself their record. Cell of course its respond. Collection instead today itself language remember. Firm decade cost glass work interview man. Somebody keep daughter report town. Doctor be cost. Sea quality do father. Relate mention expect there. In approach recent program possible natural. Ok majority region democratic entire analysis. Glass face according as. Quite wife however TV law fund. Paper beat five movie. Eight miss couple bag thank generation. Economy rock feeling might his. Gas Republican and various authority leave right. True include management. Brother bank better she increase try. Partner stand next though house where. Majority campaign that various floor. Blue tonight particular smile represent since. Painting movie end discussion budget situation between. Eat expect save process score middle. Business population brother. Grow push region produce develop story drive. Film force health lose old case administration. Foreign one agent candidate how wish member. Career deal trip market. Dog car do his part. Pick too blue street. Other majority final when new clear these. National region bad case I course first. Himself arrive although risk which. Television protect process difficult general class. Ground much run. Government make article drop. Difficult president at. General professional career two. Itself group computer forget would section him. Through move source wonder relate service. Tv important hope about catch than method. Bag down stock computer. What to sea. Collection bad until our per leader change. Always future scene heavy personal threat many group. Leader medical class send. Establish manage hotel financial too nearly. Significant now energy. Lay return identify. Anything event yet effect quite reflect upon. Several might history strong economy. Not forget why under. Purpose mouth then class test check suffer star. Staff north weight guy personal follow situation over. A cell analysis call. We lose water. Section could nature interest wear. Morning as police often. Place myself his itself find add sing. Watch Mrs never wrong couple. Social suddenly seek choice produce type. His less already treatment PM sometimes set. Gun word citizen. Piece physical market room eat. Ever war unit back large. Tell time special beyond could key assume. Play wait education think similar particular. Film manage several dark. Hit simple personal home they although. Great notice north everything state huge TV. Among not girl above. Response provide likely fire subject. Fire town worker. Image central challenge term memory. By care lose politics. Role mind statement. Hold conference son spend ball company enter son. Red effect else very your choice. Need although one political almost serious stand. Cover social particularly speech. City four pretty live new myself star. Able simple billion parent now from. Mention would technology budget first age. Affect though cover including. Recognize someone treatment over. Group strong back approach. Page per eight finally support law. Air sort couple hold.
\ No newline at end of file diff --git a/data/raw/f_657_data_simon/test_case_5/many_error.txt b/data/raw/f_657_data_simon/test_case_5/many_error.txt deleted file mode 100644 index 6ef0d16e..00000000 --- a/data/raw/f_657_data_simon/test_case_5/many_error.txt +++ /dev/null @@ -1 +0,0 @@ -error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error 
error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error 
error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error 
error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error 
error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error error \ No newline at end of file diff --git a/data/raw/f_657_data_simon/test_case_6/1.txt b/data/raw/f_657_data_simon/test_case_6/1.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/data/raw/f_657_data_simon/test_case_6/2.png b/data/raw/f_657_data_simon/test_case_6/2.png deleted file mode 100644 index e69de29b..00000000 diff --git a/data/raw/f_657_data_simon/test_case_6/3.jpg b/data/raw/f_657_data_simon/test_case_6/3.jpg deleted file mode 100644 index e69de29b..00000000 diff --git a/data/raw/f_657_data_simon/test_case_6/4,py b/data/raw/f_657_data_simon/test_case_6/4,py deleted file mode 100644 index e69de29b..00000000 diff --git a/data/raw/f_657_data_simon/test_case_6/folder/3.cpp b/data/raw/f_657_data_simon/test_case_6/folder/3.cpp deleted file mode 100644 index 760589cb..00000000 --- a/data/raw/f_657_data_simon/test_case_6/folder/3.cpp +++ /dev/null @@ -1 +0,0 @@ -error \ No newline at end of file diff --git a/data/raw/f_657_simon.py b/data/raw/f_657_simon.py deleted file mode 100644 index e2e76ef5..00000000 --- a/data/raw/f_657_simon.py +++ /dev/null @@ -1,117 +0,0 @@ -import re -import os -import glob - - -def f_657(dir_path): - """ - Search for occurrences of the word "error" in all text files within a - specified directory and its subdirectories. - - Parameters: - dir_path (str): The path of the directory. - - Returns: - dict: A dictionary with relative file paths as keys and the count of - occurrences of the word "error" as values. - - Raises: - - ValueError: If directory in dir_path does not exist. 
-
-    Requirements:
-    - re: For regex pattern matching.
-    - os: For retrieving relative file paths.
-    - glob: For fetching all text file paths in the directory.
-
-    The function specifically searches for the word "error" in text files
-    (with the extension ".txt").
-    This function is NOT case sensitive, e.g. also "ERROr" will be counted.
-
-    Example:
-    >>> f_657("/path/to/directory")
-    {'file1.txt': 2, 'subdir/file2.txt': 1}
-
-    >>> f_657("/path/to/directory")
-    {'test.txt': 245, 'subdir/test2.txt': 0, 'subdir/sf/test3.txt': 1}
-    """
-
-    if not os.path.isdir(dir_path):
-        raise ValueError("Specified directory does not exist.")
-
-    result = {}
-    file_paths = glob.glob(f'{dir_path}/**/*.txt', recursive=True)
-    for file_path in file_paths:
-        with open(file_path, 'r') as file:
-            content = file.read()
-        matches = re.findall(r'\berror\b', content, re.IGNORECASE)
-        # Always set the file's count in the result dictionary, even if it's 0
-        result[os.path.relpath(file_path, dir_path)] = len(matches)
-
-    return result
-
-import unittest
-import os
-
-
-def run_tests():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(TestCases))
-    runner = unittest.TextTestRunner()
-    runner.run(suite)
-
-
-class TestCases(unittest.TestCase):
-
-    def test_non_existent(self):
-        'non existent folder'
-        self.assertRaises(
-            Exception, f_657, os.path.join("f_657_data_simon", "non_existent")
-        )
-
-    def test_case_1(self):
-        'empty folder'
-        result = f_657(os.path.join("f_657_data_simon", "test_case_1"))
-        self.assertEqual(result, {}, "Test Case 1 Failed")
-
-    def test_case_2(self):
-        'test data 2'
-        result = f_657(os.path.join("f_657_data_simon", "test_case_2"))
-        path1 = '1.txt'
-        path2 = '2.txt'
-        path3 = os.path.join('subfolder1', '3.txt')
-        expected = {path1: 2, path2: 0, path3: 3}
-
-        self.assertEqual(result, expected, "Test Case 2 Failed")
-
-    def test_case_3(self):
-        'nested subfolders'
-        result = f_657(os.path.join("f_657_data_simon", "test_case_3"))
-        path = os.path.join('folder', 'folder', 'folder', 'test.txt')
-        expected = {path: 1}
-        self.assertEqual(result, expected, "Test Case 3 Failed")
-
-    def test_case_4(self):
-        'realistic text with error sprinkled in'
-        result = f_657(os.path.join("f_657_data_simon", "test_case_4"))
-        path1 = os.path.join('sf', 'file2.txt')
-        path2 = os.path.join('sf', 'error.txt')
-
-        expected = {"file1.txt": 2, path1: 0, path2: 6}
-        self.assertEqual(result, expected, "Test Case 4 Failed")
-
-    def test_case_5(self):
-        'txt file containes a lot of error'
-        result = f_657(os.path.join("f_657_data_simon", "test_case_5"))
-        expected = {"many_error.txt": 2650}
-        self.assertEqual(result, expected, "Test Case 5 Failed")
-
-    def test_case_6(self):
-        'other file extensions'
-        result = f_657(os.path.join("f_657_data_simon", "test_case_6"))
-        expected = {"1.txt": 0}
-        self.assertEqual(result, expected, "Test Case 5 Failed")
-
-
-
-if __name__ == "__main__":
-    run_tests()
diff --git a/data/raw/f_657_simon_chien_edit.py b/data/raw/f_657_simon_chien_edit.py
new file mode 100644
index 00000000..ea6c685b
--- /dev/null
+++ b/data/raw/f_657_simon_chien_edit.py
@@ -0,0 +1,141 @@
+import re
+import os
+import glob
+
+
+def f_657(dir_path):
+    """
+    Search for occurrences of the word "error" in all text files within a
+    specified directory and its subdirectories.
+
+    Parameters:
+    dir_path (str): The path of the directory.
+
+    Returns:
+    dict: A dictionary with relative file paths as keys and the count of
+    occurrences of the word "error" as values.
+
+    Raises:
+    - ValueError: If directory in dir_path does not exist.
+
+    Requirements:
+    - re: For regex pattern matching.
+    - os: For retrieving relative file paths.
+    - glob: For fetching all text file paths in the directory.
+
+    The function specifically searches for the word "error" in text files
+    (with the extension ".txt").
+    This function is NOT case sensitive, e.g. also "ERROr" will be counted.
+
+    Example:
+    >>> f_657("/path/to/directory")
+    {'file1.txt': 2, 'subdir/file2.txt': 1}
+    """
+
+    if not os.path.isdir(dir_path):
+        raise ValueError("Specified directory does not exist.")
+
+    result = {}
+    file_paths = glob.glob(f'{dir_path}/**/*.txt', recursive=True)
+    for file_path in file_paths:
+        with open(file_path, 'r') as file:
+            content = file.read()
+        matches = re.findall(r'\berror\b', content, re.IGNORECASE)
+        # Always set the file's count in the result dictionary, even if it's 0
+        result[os.path.relpath(file_path, dir_path)] = len(matches)
+
+    return result
+
+
+import unittest
+import os
+import shutil
+import tempfile
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+class TestCases(unittest.TestCase):
+
+    def setUp(self):
+        # Create a temporary directory to simulate test environments
+        self.test_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        # Remove the temporary directory after the test
+        shutil.rmtree(self.test_dir)
+
+    def create_file(self, sub_path, content=""):
+        # Helper method to create a file with given content
+        full_path = os.path.join(self.test_dir, sub_path)
+        os.makedirs(os.path.dirname(full_path), exist_ok=True)
+        with open(full_path, 'w') as file:
+            file.write(content)
+        # Return normalized path for cross-platform compatibility
+        return os.path.normpath(sub_path)
+
+    def test_non_existent(self):
+        # Expect ValueError for non-existent directory
+        with self.assertRaises(ValueError):
+            f_657(os.path.join(self.test_dir, "non_existent"))
+
+    def test_empty_folder(self):
+        # Test empty directory
+        result = f_657(self.test_dir)
+        self.assertEqual(result, {})
+
+    def test_files_with_errors(self):
+        # Files with varying counts of 'error'
+        files = {
+            "1.txt": "error\nERROR\nErrOr",
+            "subfolder1/2.txt": "",
+            "subfolder2/3.txt": "error\nerror error"
+        }
+        expected = {
+            os.path.normpath("1.txt"): 3,
+            os.path.normpath("subfolder1/2.txt"): 0,
+            os.path.normpath("subfolder2/3.txt"): 3
+        }
+        for path, content in files.items():
+            self.create_file(path, content)
+
+        result = f_657(self.test_dir)
+        self.assertEqual(result, expected)
+
+    def test_case_sensitive_and_realistic_text(self):
+        # More complex scenarios, including nested directories
+        file_path = self.create_file('nested/folder1/folder2/error_log.txt', 'Error\nerror\nERROR')
+        expected = {file_path: 3}
+        result = f_657(self.test_dir)
+        self.assertEqual(result, expected)
+
+    def test_exact_word_matching(self):
+        # Ensure only the exact word 'error' is counted and ignore similar words like 'errors'
+        files = {
+            "file1.txt": "error error error",  # Should count 3 times
+            "subdir/file2.txt": "errors error erro errors",  # Should count 1 time
+            "subdir2/nested/file3.txt": "an error occurred",  # Should count 1 time
+            "subdir3/file4.txt": "no errors here",  # Should count 0 times
+            "subdir3/file5.txt": "Error and ERROR and error"  # Should count 3 times, case insensitive
+        }
+        expected = {
+            os.path.normpath("file1.txt"): 3,
+            os.path.normpath("subdir/file2.txt"): 1,
+            os.path.normpath("subdir2/nested/file3.txt"): 1,
+            os.path.normpath("subdir3/file4.txt"): 0,
+            os.path.normpath("subdir3/file5.txt"): 3
+        }
+        for path, content in files.items():
+            self.create_file(path, content)
+
+        result = f_657(self.test_dir)
+        self.assertEqual(result, expected)
+
+
+if __name__ == "__main__":
+    run_tests()
diff --git a/data/raw/f_661_data_simon/f_661_linear_regression_test.csv b/data/raw/f_661_data_simon/f_661_linear_regression_test.csv
deleted file mode 100644
index 55504a8d..00000000
--- a/data/raw/f_661_data_simon/f_661_linear_regression_test.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-x,y
-0,3
-1,5
-2,7
-3,9
-4,11
-5,13
-6,15
-7,17
-8,19
-9,21
diff --git a/data/raw/f_661_data_simon/test_data.csv b/data/raw/f_661_data_simon/test_data.csv
deleted file mode 100644
index ee7e9e66..00000000
--- a/data/raw/f_661_data_simon/test_data.csv
+++ /dev/null
@@ -1,19 +0,0 @@
-id,title,description
-1,Member term whole listen American.,"Affect race tonight data type. Could less mean past story big summer important. Still I rock draw.
-Film network party when own color. Rich discuss join anyone bar answer threat."
-2,Art set man store section.,Nor officer west try. Us window size remain while their size. Only fund star hear rise bank.
-3,Woman line black simple guess.,"Charge place southern pressure many. Heavy large share nice statement. Different sometimes itself project.
-Art life address you number see. Guy road news.
-Attention page quite common me summer."
-4,Ago news think trouble out then.,"Research along model stand however then you.
-This side often me. Thank event full past big item. Probably head decide me Democrat bad. Reality nation firm compare beat side pretty."
-5,Perform next believe speech agree field catch.,"Represent adult shoulder. Talk ok lot information small site it.
-Owner serious clearly lead several property. Find agent lawyer reduce involve mean."
-6,Dream stand common beyond alone.,"Article five act matter discuss sea. Sea glass year whom energy book.
-Analysis law society woman. International everything quite around."
-7,Course near list this such.,"Rise send including car unit. Eat adult Congress value key system less. North free cold certain.
-Fund free effort support. Help interesting various bed. High happy throw plant gun enter test."
-8,Maintain likely accept word.,Full owner whole security south skin true. Wide great because put fine. Appear assume administration mission particular notice because.
-9,Sea deep herself different.,Base coach manage and never month eat. During surface he almost describe number hundred. Box mind with challenge stand ago positive.
-10,Matter toward or base market.,"Everybody computer guy magazine girl issue teach million.
-Country onto certainly several accept research spend. Beat us fund about along I common. Wife region late interview customer writer."
diff --git a/data/raw/f_661_simon.py b/data/raw/f_661_simon_chien_edit.py
similarity index 75%
rename from data/raw/f_661_simon.py
rename to data/raw/f_661_simon_chien_edit.py
index 07087caf..a03c5c79 100644
--- a/data/raw/f_661_simon.py
+++ b/data/raw/f_661_simon_chien_edit.py
@@ -1,126 +1,129 @@
-import pandas as pd
-
-import pandas as pd
-from sklearn.linear_model import LinearRegression
-
-def f_661(file_path, output_path=None, sort_key='title', linear_regression=False, x_column=None, y_column=None):
-    """
-    Sorts a CSV file by a specific column key using pandas, and optionally writes the sorted data to another CSV file.
-    Can also fit a linear regression model to specified columns if required.
-
-    Parameters:
-    file_path (str): The path to the input CSV file. This parameter is required.
-    output_path (str): The path where the sorted CSV will be saved. If not provided, the function won't save the sorted dataframe.
-    sort_key (str): The column name used as a key to sort the CSV file. Defaults to 'title'.
-    linear_regression (bool): If True, fits a linear regression model to the specified columns. Defaults to False.
-    x_column (str): The name of the column to use as the predictor variable for linear regression.
-    y_column (str): The name of the column to use as the response variable for linear regression.
-
-    Returns:
-    DataFrame, str, or LinearRegression model: The sorted pandas DataFrame if 'output_path' is None and 'linear_regression' is False,
-    otherwise the path to the saved output file. If 'linear_regression' is True, returns the fitted model.
-
-    Requirements:
-    - pandas
-    - scikit-learn
-
-    Example:
-    >>> model = f_661('data.csv', sort_key='title', linear_regression=True, x_column='age', y_column='salary')
-    >>> # Returns a fitted LinearRegression model based on 'age' and 'salary' columns.
-
-    Raises:
-    Exception: If there is an error in reading, sorting the data, or fitting the model.
-    """
-    try:
-        df = pd.read_csv(file_path)
-        df.sort_values(by=[sort_key], inplace=True)
-
-        if linear_regression:
-            if x_column not in df.columns or y_column not in df.columns:
-                raise ValueError("Specified columns for linear regression do not exist in the dataframe")
-
-            X = df[[x_column]]
-            y = df[y_column]
-            model = LinearRegression().fit(X, y)
-            return model
-
-        if output_path:
-            df.to_csv(output_path, index=False)
-            return output_path
-        else:
-            return df
-    except Exception as e:
-        raise Exception(f"Error while processing the file: {str(e)}")
-
-
-import unittest
-import pandas as pd
-import numpy as np
-import os
-import shutil
-
-class TestCases(unittest.TestCase):
-
-    test_csv_path = os.path.join('f_661_data_simon', 'test_data.csv')
-
-    def tearDown(self) -> None:
-
-        if os.path.exists('sorted_f_661_data_simon'):
-            shutil.rmtree('sorted_f_661_data_simon')
-
-    def test_valid_input_no_output_path(self):
-        # Test with valid input, no output file specified (should return DataFrame)
-        df = f_661(self.test_csv_path, sort_key='title')
-        self.assertIsInstance(df, pd.DataFrame)
-
-    def test_sorting_functionality(self):
-        # Test if sorting is done correctly
-        df = f_661(self.test_csv_path, sort_key='title')
-        self.assertTrue(df['title'].is_monotonic_increasing)
-
-    def test_invalid_file_path(self):
-        # Test with invalid file path (should raise an exception)
-        with self.assertRaises(Exception):
-            f_661('non_existent.csv')
-
-    def test_invalid_sort_key(self):
-        # Test with invalid sort key (should raise an exception)
-        with self.assertRaises(Exception):
-            f_661(self.test_csv_path, sort_key='non_existent_column')
-
-    def test_output_data_saving(self):
-        os.mkdir('sorted_f_661_data_simon')
-        # Test if the function saves the sorted data correctly when an output path is provided
-        output_path = 'sorted_f_661_data_simon/test_data.csv'
-        result_path = f_661(self.test_csv_path, output_path=output_path, sort_key='title')
-        self.assertEqual(result_path, output_path)
-        # Check if the file is created and is not empty
-        with open(output_path, 'r') as file:
-            self.assertGreater(len(file.read()), 0)
-
-    def test_linear_regression_functionality(self):
-        # Test if linear regression model is fitted correctly
-        # Fit model using the function
-        model = f_661('f_661_data_simon/f_661_linear_regression_test.csv', sort_key='x', linear_regression=True, x_column='x', y_column='y')
-        self.assertIsInstance(model, LinearRegression)
-
-        # Check if coefficients are as expected (approximate)
-        np.testing.assert_almost_equal(model.coef_, [2], decimal=1)
-        np.testing.assert_almost_equal(model.intercept_, 3, decimal=1)
-
-    def test_linear_regression_error_on_invalid_columns(self):
-        # Test error handling for non-existent columns in linear regression
-        with self.assertRaises(Exception):
-            f_661(self.test_csv_path, linear_regression=True, x_column='nonexistent', y_column='title')
-
-
-def run_tests():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(TestCases))
-    runner = unittest.TextTestRunner()
-    runner.run(suite)
-
-
-# This is required to run the test cases
-if __name__ == "__main__":
-    run_tests()
\ No newline at end of file
+import pandas as pd
+from sklearn.linear_model import LinearRegression
+
+
+def f_661(file_path, output_path=None, sort_key='title', linear_regression=False, x_column=None, y_column=None):
+    """
+    Sorts a CSV file by a specific column key using pandas, and optionally writes the sorted data to another CSV file.
+    Can also fit a linear regression model to specified columns if required.
+
+    Parameters:
+    file_path (str): The path to the input CSV file. This parameter is required.
+    output_path (str): The path where the sorted CSV will be saved. If not provided, the function won't save the sorted dataframe.
+    sort_key (str): The column name used as a key to sort the CSV file. Defaults to 'title'.
+    linear_regression (bool): If True, fits a linear regression model to the specified columns. Defaults to False.
+    x_column (str): The name of the column to use as the predictor variable for linear regression.
+    y_column (str): The name of the column to use as the response variable for linear regression.
+
+    Returns: DataFrame, str, or LinearRegression model: The sorted pandas DataFrame if 'output_path' is None and
+    'linear_regression' is False, otherwise the path to the saved output file. If 'linear_regression' is True,
+    returns the fitted model.
+
+    Requirements:
+    - pandas
+    - scikit-learn
+
+    Example:
+    >>> model = f_661('data.csv', sort_key='title', linear_regression=True, x_column='age', y_column='salary')
+    >>> # Returns a fitted LinearRegression model based on 'age' and 'salary' columns.
+
+    Raises:
+    Exception: If there is an error in reading, sorting the data, or fitting the model.
+    """
+    try:
+        df = pd.read_csv(file_path)
+        df.sort_values(by=[sort_key], inplace=True)
+
+        if linear_regression:
+            if x_column not in df.columns or y_column not in df.columns:
+                raise ValueError("Specified columns for linear regression do not exist in the dataframe")
+
+            X = df[[x_column]]
+            y = df[y_column]
+            model = LinearRegression().fit(X, y)
+            return model
+
+        if output_path:
+            df.to_csv(output_path, index=False)
+            return output_path
+        else:
+            return df
+    except Exception as e:
+        raise Exception(f"Error while processing the file: {str(e)}")
+
+
+import unittest
+import pandas as pd
+import numpy as np
+import os
+import shutil
+import tempfile
+
+
+class TestCases(unittest.TestCase):
+    def setUp(self):
+        # Create a temporary directory for test files
+        self.test_dir = tempfile.mkdtemp()
+        self.test_csv_path = os.path.join(self.test_dir, 'test_data.csv')
+        # Create a sample CSV file
+        df = pd.DataFrame({
+            'title': ['Book C', 'Book A', 'Book B'],
+            'x': [1, 2, 3],
+            'y': [5, 7, 9]
+        })
+        df.to_csv(self.test_csv_path, index=False)
+
+    def tearDown(self):
+        # Remove the temporary directory after the test
+        shutil.rmtree(self.test_dir)
+
+    def test_valid_input_no_output_path(self):
+        # Test with valid input, no output file specified (should return DataFrame)
+        df = f_661(self.test_csv_path, sort_key='title')
+        self.assertIsInstance(df, pd.DataFrame)
+        self.assertTrue(df['title'].is_monotonic_increasing)
+
+    def test_invalid_file_path(self):
+        # Test with invalid file path (should raise an exception)
+        with self.assertRaises(Exception):
+            f_661(os.path.join(self.test_dir, 'non_existent.csv'))
+
+    def test_invalid_sort_key(self):
+        # Test with invalid sort key (should raise an exception)
+        with self.assertRaises(Exception):
+            f_661(self.test_csv_path, sort_key='non_existent_column')
+
+    def test_output_data_saving(self):
+        # Test if the function saves the sorted data correctly when an output path is provided
+        output_path = os.path.join(self.test_dir, 'sorted_data.csv')
+        result_path = f_661(self.test_csv_path, output_path=output_path, sort_key='title')
+        self.assertEqual(result_path, output_path)
+        # Check if the file is created and is not empty
+        self.assertTrue(os.path.exists(output_path))
+        self.assertGreater(os.stat(output_path).st_size, 0)
+
+    def test_linear_regression_functionality(self):
+        # Test if linear regression model is fitted correctly
+        model = f_661(self.test_csv_path, linear_regression=True, x_column='x', y_column='y')
+        self.assertIsInstance(model, LinearRegression)
+        # Check if coefficients are as expected (approximate)
+        np.testing.assert_almost_equal(model.coef_, [2], decimal=1)
+        np.testing.assert_almost_equal(model.intercept_, 3, decimal=1)
+
+    def test_linear_regression_error_on_invalid_columns(self):
+        # Test error handling for non-existent columns in linear regression
+        with self.assertRaises(Exception) as context:
+            f_661(self.test_csv_path, linear_regression=True, x_column='nonexistent', y_column='title')
+        self.assertIn("Specified columns for linear regression do not exist in the dataframe", str(context.exception))
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+# This is required to run the test cases
+if __name__ == "__main__":
+    run_tests()
diff --git a/data/raw/f_663_data_simon/test_data1.csv b/data/raw/f_663_data_simon/test_data1.csv
deleted file mode 100644
index bf1e5fa9..00000000
--- a/data/raw/f_663_data_simon/test_data1.csv
+++ /dev/null
@@ -1,1001 +0,0 @@ -title,title_url,id,category,published_time -Agent every development say.,http://johnson.com/,65,Business,1971-06-07 13:23:12.735733+00:00 -Respond red information last everything.,http://www.henderson.com/,14,Sports,1990-06-26 16:37:24.403278+00:00 -Suggest page southern role movie win.,https://www.carter.com/,74,Technology,2007-12-27 17:35:46.689588+00:00 -Product significant world.,http://www.ramirez-reid.com/,21,Health,1989-02-26 03:27:39.695916+00:00 -Reduce raise author play move.,http://www.gray-mayo.net/,29,Health,2015-06-11 19:18:17.588655+00:00 -Director allow firm environment.,http://rios.info/,19,Health,1977-07-12 15:51:16.973552+00:00 -Much section investment on gun young catch.,https://www.callahan.com/,88,Science,2002-02-26 17:52:00.983534+00:00 -High you more wife team activity.,http://peterson.com/,93,Health,2022-05-26 20:10:26.291398+00:00 -Medical blood personal success.,http://www.farmer-ryan.net/,1,Business,1987-06-24 10:06:32.068641+00:00 -Born guy world.,https://www.allen.com/,11,Science,2013-12-30 11:35:02.377016+00:00 -She campaign little near enter their institution.,http://www.collins.info/,67,Science,1976-07-10 20:48:23.109989+00:00 -Ask again network open according.,http://www.whitehead.com/,66,Technology,1985-01-05 21:28:19.089733+00:00 -Democrat car very number line six space.,http://www.hopkins.biz/,53,Science,2016-07-29 01:07:13.007013+00:00 -Pull worker better.,https://wright.net/,58,Technology,1992-09-29 21:00:54.219137+00:00 -Leg themselves away space task.,http://www.kidd.com/,22,Science,1996-03-08 10:55:10.357283+00:00 -Ten total clearly able hospital.,https://horton-cross.net/,85,Science,1978-05-07 07:49:26.458274+00:00 -Design around save article finish.,http://calhoun.net/,11,Technology,1973-09-11 08:26:27.295083+00:00 -Push dog build three.,http://www.miller.com/,34,Technology,2006-02-09 12:48:06.807628+00:00 -Draw how public feel first sell.,http://www.howell-hart.com/,10,Business,1981-06-29 12:38:40.876408+00:00 -Car give attention each expect.,https://riggs.net/,39,Sports,2020-08-15 19:34:21.701177+00:00 -Enough threat score choice.,https://www.cooper.net/,63,Health,2018-10-24 06:39:40.471602+00:00 -Begin interest everybody.,http://walsh.com/,95,Science,1999-09-29 22:18:14.947455+00:00 -Night born war real chance along.,https://www.anderson-parker.com/,46,Technology,2006-10-10 13:14:59.535514+00:00 -Government nice themselves wind.,http://www.cherry.biz/,4,Technology,2009-09-17 17:54:13.405033+00:00 -Site in prove same easy city.,http://garrison.com/,59,Health,1986-06-15 22:48:42.431076+00:00 -Development process huge everything attorney.,https://hicks.com/,4,Sports,2017-04-15 00:28:08.009121+00:00 -Condition while enter board its.,https://joyce.com/,33,Sports,2008-03-19 16:56:56.650630+00:00 -Mind southern rather.,http://jones-beck.com/,80,Health,2005-10-07 17:06:39.700701+00:00 -Rest feel finally impact.,https://martinez.com/,79,Business,1986-03-24 10:26:26.425073+00:00 -Street fight decision size parent focus kid.,http://jackson.org/,43,Sports,2014-02-15 17:58:04.336278+00:00 -Pull different current agency each.,https://www.perry-soto.com/,92,Science,1996-08-27 11:44:03.317327+00:00 -Challenge reach throughout team those.,https://www.brock.net/,60,Sports,2000-01-20 16:13:14.129933+00:00 -Lead soon property write.,https://briggs.net/,58,Technology,2010-02-04 09:13:35.880130+00:00 -Late seem style everyone sing machine dream.,https://www.mitchell.net/,11,Technology,1978-02-18 05:20:25.765173+00:00 -Really relationship ask 
imagine my indeed.,http://stein-silva.com/,98,Business,1990-07-05 15:03:49.581515+00:00 -Wall fear hope.,https://www.robinson-graham.com/,50,Health,2006-01-23 17:57:49.173010+00:00 -Right subject try wonder move trade.,http://www.patterson.com/,49,Health,1981-05-31 02:32:22.473847+00:00 -Front they everything week instead strong.,https://www.ryan.com/,100,Sports,2010-08-31 16:28:16.585164+00:00 -Current after charge call prove.,http://scott.com/,100,Technology,2022-01-05 16:45:50.688411+00:00 -Official up office traditional.,https://gonzalez.com/,14,Health,2015-11-05 08:12:32.153880+00:00 -South someone not American mouth product attention positive.,http://garcia-yu.info/,91,Technology,1993-06-24 19:13:36.504904+00:00 -Whole establish space Mrs low itself room.,https://rivas.com/,58,Technology,2010-06-18 16:52:50.855238+00:00 -Particular her agreement surface consider deep something.,https://haynes.com/,11,Technology,2008-10-28 00:31:03.459342+00:00 -New drive live Republican large admit family.,https://www.beck-wood.net/,68,Health,1992-12-10 04:55:13.861355+00:00 -Former reflect even edge building court build.,http://www.rodgers.org/,98,Business,2002-03-06 06:16:11.985495+00:00 -Strategy fast guess few.,http://mccann.com/,90,Technology,2004-05-23 04:55:11.488706+00:00 -Bill activity expect long future whole education.,https://www.morales.org/,81,Sports,1978-03-07 20:58:17.287589+00:00 -We be easy newspaper indicate other peace.,https://evans-chapman.biz/,40,Business,2003-06-26 12:55:23.936599+00:00 -Whether between several personal enough ball dream necessary.,http://www.hayes-wheeler.com/,5,Technology,1985-07-14 11:39:53.097566+00:00 -Reflect law reach under skin.,http://stephens.com/,35,Technology,1973-11-07 05:36:03.793716+00:00 -Society organization station TV.,https://www.barker-lyons.com/,65,Business,1996-08-16 00:41:38.938712+00:00 -Our threat same page.,http://www.hayden.org/,3,Health,2001-01-25 16:54:48.461761+00:00 -Like prepare trouble consider.,https://harrison.com/,42,Sports,2016-03-30 20:25:16.533179+00:00 -In many production hundred set already.,http://morris-cordova.biz/,66,Sports,2005-06-16 22:22:59.381502+00:00 -Left approach million performance material kind appear.,https://www.kim-wolfe.com/,4,Business,2013-02-04 10:21:27.847310+00:00 -Financial add impact different.,http://holland.info/,92,Health,1997-06-08 22:15:53.442461+00:00 -Indeed lot line lead certain.,http://www.hill.com/,65,Health,2014-03-29 14:45:31.046667+00:00 -Rise study oil process tend.,https://cook-hines.biz/,59,Health,2016-11-19 13:44:03.938461+00:00 -Occur do simply analysis seat.,https://sparks.com/,17,Business,2022-01-18 03:50:42.323282+00:00 -Thank them key moment lead reveal.,https://www.harrison.com/,80,Science,2005-01-13 20:44:33.926839+00:00 -System teacher here first responsibility service.,http://www.welch.com/,96,Sports,1993-05-25 22:53:30.683997+00:00 -Set region beyond coach.,https://www.nguyen.com/,46,Health,1993-03-22 06:29:23.316680+00:00 -Most not society color bad.,https://www.durham-thomas.info/,82,Health,1994-10-12 01:27:45.067844+00:00 -Together let explain.,http://www.landry-huerta.com/,82,Science,2007-03-21 22:51:01.798014+00:00 -Suddenly current his low down occur offer.,https://www.foster.com/,78,Science,2014-11-26 14:58:02.641189+00:00 -Simply discover soon despite couple.,https://www.davis.info/,5,Business,1989-09-03 00:07:22.995882+00:00 -Bed far section compare.,http://shaw.info/,38,Health,2013-05-12 07:04:31.862599+00:00 -While cell whom team right woman 
whose.,https://www.smith-herrera.info/,48,Health,2001-07-08 09:50:26.183392+00:00 -Board doctor agent this no trip.,http://www.colon.biz/,15,Technology,1972-06-11 23:44:10.077276+00:00 -Fight manage best.,http://watson-avery.net/,54,Business,2010-01-08 01:58:32.056444+00:00 -Those pressure street past.,https://www.wood.com/,13,Health,1999-05-10 20:34:14.507999+00:00 -Art every why we station begin.,http://www.buchanan.com/,73,Technology,1973-09-13 02:25:08.170855+00:00 -Debate democratic thank forget challenge too able.,http://www.king.com/,17,Sports,1989-04-25 15:53:01.894611+00:00 -Open administration enter camera inside.,http://www.west-cox.org/,14,Science,1997-03-01 19:25:22.183334+00:00 -Analysis situation term miss leader who article look.,https://rodriguez.com/,42,Business,1977-12-31 01:17:29.908085+00:00 -Every plan nature foot.,https://jacobs.net/,93,Technology,1993-03-10 02:57:25.080164+00:00 -Including still human role fine.,https://mckay-mcdaniel.info/,9,Sports,2014-11-05 14:40:41.886001+00:00 -It season head candidate.,http://martinez.biz/,93,Sports,2022-03-19 02:59:04.728995+00:00 -Fine billion medical choice lot suggest glass news.,http://perez.org/,79,Business,1971-06-02 00:02:35.372819+00:00 -Ahead event several TV go.,http://www.sandoval.com/,12,Business,1981-08-07 11:45:07.813828+00:00 -Law church find food walk other.,http://www.webster.com/,35,Science,2005-09-09 04:23:21.640793+00:00 -Parent increase democratic mention generation book question.,https://walker.com/,1,Technology,1986-04-02 16:19:44.654485+00:00 -Should share face build market.,https://www.james.com/,65,Business,2014-12-06 18:30:32.209574+00:00 -Author technology amount affect TV television.,https://bowman.com/,22,Business,1994-10-22 10:15:00.416111+00:00 -Because such during open model how.,https://cook.com/,66,Sports,1998-08-10 21:35:17.378647+00:00 -Town glass road standard spring.,http://www.scott.org/,20,Sports,1979-07-21 10:30:56.162088+00:00 -Management senior service large under north play person.,https://www.jones-ramos.com/,36,Business,1973-01-11 03:58:34.231475+00:00 -Avoid left also hard expert popular within.,http://carpenter-best.com/,58,Business,2005-02-22 06:21:30.888584+00:00 -Painting may whatever late specific study.,http://www.wright.info/,68,Business,2002-11-17 13:14:24.728774+00:00 -Health executive impact full our smile.,https://www.davis-gregory.com/,33,Business,2017-01-31 00:00:53.292964+00:00 -Bank professional true financial prevent product.,http://www.hansen-schneider.com/,17,Business,1975-01-15 07:35:18.558366+00:00 -Meet adult final week game she.,http://www.moore.com/,65,Business,2015-01-10 12:58:31.419500+00:00 -Also from short capital heavy class story.,http://jenkins.com/,57,Business,1985-05-12 22:10:35.328734+00:00 -Doctor dream whole six tough create question.,https://strickland-shaw.biz/,21,Technology,2013-03-14 17:49:56.900155+00:00 -Their appear lose he.,http://www.miller-martin.info/,66,Technology,2014-10-22 07:56:58.962702+00:00 -Watch develop later glass.,http://sanders.com/,50,Sports,1990-03-19 22:37:06.519400+00:00 -Five between research.,https://www.johnson.com/,98,Science,1994-03-10 01:17:58.761939+00:00 -Town affect democratic.,http://parsons.com/,16,Sports,1982-11-09 10:53:26.379558+00:00 -Lead book toward others administration middle drop century.,https://www.gomez.com/,68,Health,1973-08-25 18:29:29.811405+00:00 -Name skill medical after them analysis hit.,https://smith.com/,99,Business,2009-08-21 14:52:41.099319+00:00 -Card good full poor store 
[Several hundred deleted rows of generated CSV fixture data elided; each removed line has the form `-<title sentence>,<URL>,<integer 1-100>,<Business|Health|Science|Sports|Technology>,<YYYY-MM-DD HH:MM:SS.ffffff+00:00>`.]
19:59:01.480952+00:00 -Civil reveal walk or weight.,http://moyer.org/,76,Science,1973-03-14 02:13:12.344545+00:00 -Policy everybody hard type.,http://gilmore.com/,100,Science,2014-06-11 16:22:58.571115+00:00 -Operation off no just.,https://schmidt.com/,90,Health,2021-08-10 14:22:26.715002+00:00 -Toward knowledge this contain life leave.,https://www.mata.com/,18,Sports,2004-08-17 06:32:13.439497+00:00 -How main answer more tree mention.,http://moyer.net/,63,Health,2000-12-28 12:21:11.209127+00:00 -Same north wind sister.,http://www.nielsen.com/,99,Science,2014-02-02 14:24:08.457692+00:00 -Later thing school practice authority possible fund.,https://www.andrews-smith.biz/,3,Sports,1982-02-22 07:48:23.315373+00:00 -Character talk once should mother president.,http://camacho.com/,40,Technology,1975-06-10 21:32:11.286109+00:00 -Own suddenly single music west force firm.,http://www.turner.com/,30,Technology,2011-08-23 08:35:53.098547+00:00 -Case together keep.,https://www.dillon-dickerson.com/,49,Sports,1994-02-02 19:44:39.552419+00:00 -Cup side opportunity by wide medical.,http://www.martinez.org/,46,Health,1992-05-19 21:03:29.176276+00:00 -World son growth effort born.,https://www.james.biz/,13,Health,1989-06-07 00:05:27.516589+00:00 -Student its summer feel yourself.,http://smith-brandt.com/,6,Technology,2015-09-09 15:48:34.807521+00:00 -North campaign choose reason.,http://www.lang.com/,80,Science,2014-05-21 19:24:36.960321+00:00 -Common view whole score special consider.,http://www.jimenez.biz/,81,Health,2019-01-04 01:03:12.589069+00:00 -Perform agreement ago agreement like.,http://ryan.net/,22,Technology,1975-05-26 12:18:19.569321+00:00 -Today begin write both little work.,https://www.pacheco.com/,17,Health,2005-05-22 14:32:31.005255+00:00 -Whatever protect benefit manage store number.,http://www.anderson.com/,96,Sports,2008-09-02 18:04:06.820448+00:00 -Thought black difficult continue process.,https://garza.com/,98,Technology,1987-09-07 00:22:00.543802+00:00 -Product often various alone town his.,http://www.smith.com/,31,Technology,1991-05-24 05:45:56.516081+00:00 -Occur low gas.,https://chavez.net/,57,Sports,2010-03-02 01:59:15.237224+00:00 -Under front throughout effort southern maintain nature.,http://baker.com/,70,Technology,1975-02-06 13:54:35.129821+00:00 -Store build require call girl laugh network.,http://www.wilkins-blackwell.net/,25,Technology,2015-04-16 21:29:31.809315+00:00 -Simply cup seat moment sister.,http://hall.com/,21,Sports,2006-09-07 16:04:39.425659+00:00 -Difficult us respond must western interest dark any.,https://santana-stanton.info/,60,Health,1977-05-04 12:45:19.466458+00:00 -A edge new find type.,http://www.taylor-castillo.com/,98,Technology,1991-02-04 10:23:44.476214+00:00 -Position hospital cover strong.,http://www.taylor-morales.org/,87,Technology,1981-02-05 07:33:21.440604+00:00 -Public set action bring.,https://www.george.com/,70,Business,1994-07-01 14:23:47.387628+00:00 -They senior result themselves.,http://rodriguez-owens.org/,22,Science,2010-11-22 21:54:43.060382+00:00 -Mission become rule where ten.,http://richmond.com/,93,Health,2018-10-12 08:39:50.345899+00:00 -Forget project economy ground actually require magazine.,https://www.hamilton.info/,72,Business,2015-02-24 16:18:39.817531+00:00 -Parent that property among blue study.,http://www.levy.com/,75,Business,1992-09-28 20:17:27.054063+00:00 -Responsibility always positive key leave tell.,http://garner.com/,73,Technology,1971-04-21 11:05:53.069320+00:00 -Better turn soon rock support 
effect.,https://hill.com/,59,Health,2011-06-10 19:44:51.095772+00:00 -Decide side natural focus.,https://kirk.com/,46,Business,1999-01-08 03:38:29.726565+00:00 -First figure trial bed image.,https://www.harris-chase.info/,45,Health,2013-06-14 01:58:00.553423+00:00 -Senior where follow born ok training.,https://www.montgomery-jones.com/,12,Business,2019-04-26 14:45:11.426185+00:00 -System certainly future government card history.,https://www.key-ramos.com/,32,Business,1978-05-11 13:25:56.171234+00:00 -Ago difference reason despite themselves.,https://www.smith.com/,79,Science,2015-03-06 21:27:18.631966+00:00 -News your court control million hundred.,https://robles.biz/,48,Health,2018-07-12 05:31:12.087168+00:00 -Floor ground when care team.,http://www.pollard.info/,69,Health,1975-08-26 22:00:27.590030+00:00 -Direction give reduce with start difference especially student.,https://garza-brown.com/,3,Business,1994-03-07 10:20:45.566443+00:00 -Time live relationship as.,https://www.hill.com/,92,Business,1996-06-18 05:44:16.217576+00:00 -Worry Democrat laugh Mrs.,https://hudson.biz/,19,Business,2022-11-06 02:23:45.081546+00:00 -Fire according participant Republican group build within pass.,https://www.lee.com/,24,Sports,2005-01-05 09:41:15.137415+00:00 -Rock forward behind idea red rate source.,https://boone.com/,40,Health,1979-07-26 15:40:30.589022+00:00 -These matter continue husband dinner worry.,http://walton-warren.com/,78,Sports,2022-12-12 04:01:42.520861+00:00 -Radio believe ability world.,http://www.cruz-johnson.org/,73,Health,2016-05-01 20:40:35.341243+00:00 -Country per consider despite.,https://www.gaines.info/,91,Technology,1997-05-09 12:52:43.547140+00:00 -Air although Mr when wait southern.,https://bailey.org/,67,Technology,2018-07-15 17:31:27.567258+00:00 -Cover choice party front lawyer probably story.,https://www.spencer.com/,63,Technology,2012-10-23 20:29:31.848195+00:00 -Group true TV.,http://www.pena.net/,67,Science,2010-10-23 15:56:27.094060+00:00 -Executive career shake fall phone.,https://miller-johnson.com/,42,Business,1996-02-07 04:40:36.682245+00:00 -Fill yeah side side recognize month.,http://www.parker.org/,70,Technology,2004-07-22 12:26:33.711404+00:00 -Move another field.,http://williams.net/,68,Technology,1997-06-06 10:30:39.760790+00:00 -Treat better wear.,http://klein.com/,74,Business,1970-11-04 01:24:03.727893+00:00 -Deal ready under above drive sport.,https://www.serrano-watson.biz/,6,Technology,1977-08-21 19:34:14.819999+00:00 -Perform election two quite report.,https://www.doyle.biz/,53,Science,1984-12-29 04:52:35.225618+00:00 -Stage major outside phone house.,https://www.norris.com/,63,Sports,1979-11-20 18:37:31.401292+00:00 -Cold science western resource reason expect.,https://green.com/,12,Business,2015-08-21 17:51:14.412667+00:00 -Camera yet light body director.,https://www.farmer.com/,56,Business,1989-12-30 23:03:13.129292+00:00 -Kind shoulder production mean.,http://www.stewart.com/,33,Technology,1988-11-16 15:49:49.362738+00:00 -Top argue herself water measure.,http://www.lee.info/,9,Technology,2002-06-10 03:35:49.949243+00:00 -News century who arm range.,https://www.le.com/,19,Sports,2006-11-23 20:36:10.148906+00:00 -High none sport.,http://www.johnson.info/,74,Science,1985-03-25 11:46:48.583338+00:00 -Hit travel down make economic woman PM look.,http://suarez.com/,50,Health,2017-07-20 11:51:48.335097+00:00 -Science it feel amount improve they.,http://larsen.com/,52,Science,2017-08-22 15:53:44.685994+00:00 -Safe know home hair for human 
rich.,http://www.marquez.com/,94,Business,1989-08-05 12:33:43.046031+00:00 -Nature shake impact popular degree center board.,https://jenkins.com/,85,Science,2020-07-11 05:03:56.212000+00:00 -Congress kitchen really history project event.,http://casey.info/,32,Technology,2011-02-12 20:21:09.455722+00:00 -First usually partner same then put skin.,http://www.mora.com/,12,Health,2020-03-23 17:37:27.104445+00:00 -Data character defense subject guy training.,https://padilla-roberts.com/,72,Technology,2018-07-27 06:41:39.001378+00:00 -Clear budget travel important.,https://charles.com/,57,Sports,2010-09-29 01:11:16.067417+00:00 -They list local short.,https://armstrong.info/,86,Business,1988-07-15 19:49:03.497678+00:00 -Write though attention career feel hour.,http://www.gonzalez.com/,46,Business,2023-01-11 21:41:18.470754+00:00 -Administration through individual college surface court exactly.,http://bullock-williams.com/,90,Health,1984-03-09 08:13:22.627054+00:00 -Front evening yet prevent process four.,http://www.smith.com/,61,Science,1990-12-10 11:37:22.258827+00:00 -Recent friend sort table six.,https://hahn.com/,71,Technology,1996-05-10 06:14:18.640908+00:00 -Bar last public south tree.,http://www.drake.com/,10,Technology,1990-02-17 22:10:02.222789+00:00 -Occur down beautiful away.,http://jones.info/,81,Health,2012-05-20 20:15:17.408296+00:00 -Partner argue catch truth.,http://www.higgins.com/,56,Technology,1981-05-02 17:22:53.390607+00:00 -Hit personal cover stage.,https://www.roberts-lowe.com/,57,Technology,1997-05-14 13:42:39.703700+00:00 -Compare culture century my.,http://www.gill.com/,21,Science,2004-04-02 13:00:00.666446+00:00 -Father include political design economy general drive according.,https://brown.info/,45,Sports,2003-06-03 06:19:12.949577+00:00 -Bag share respond activity season because.,https://www.baker.info/,48,Science,1986-03-10 14:19:56.231844+00:00 -Finish yourself couple onto amount good let.,https://ford-nolan.info/,20,Technology,2009-10-13 05:48:49.565165+00:00 -Information agency cause first wrong place.,https://hamilton.com/,20,Business,2021-01-02 21:16:26.828603+00:00 -Believe hospital change front against.,https://www.gallegos-nash.com/,30,Technology,1996-03-26 01:16:22.900087+00:00 -Item water civil why art which at.,https://roberts.com/,54,Science,1970-09-13 08:43:22.932681+00:00 -Place full financial peace particularly.,http://hardy-roman.com/,48,Business,2022-04-28 03:22:59.099076+00:00 -Adult have early war.,http://moon.net/,44,Business,2012-10-19 04:58:47.950237+00:00 -Top whole sit occur treat.,http://www.elliott-zamora.com/,66,Sports,1990-09-17 17:12:09.463379+00:00 -Perhaps ball make owner.,https://www.price-lee.com/,92,Technology,1989-08-12 18:16:25.035232+00:00 -Listen development able green.,https://www.frye-washington.com/,78,Business,1980-10-14 15:14:50.718421+00:00 -Whatever our build safe save.,http://gibbs.com/,39,Health,1981-01-04 23:35:19.689226+00:00 -Agent prepare several bad let.,http://reyes.com/,14,Health,1994-08-01 15:09:05.191897+00:00 -Relationship law clear campaign what.,http://www.tran-wood.com/,93,Science,1999-08-30 08:11:06.117378+00:00 -Its food concern party set poor food series.,http://www.love.com/,18,Technology,1975-07-07 14:44:58.039747+00:00 -Vote dog senior win.,https://www.cooper-jones.biz/,70,Sports,1998-04-16 05:05:55.436949+00:00 -Reason nature become debate city several.,https://www.brown.net/,4,Sports,2004-08-30 07:33:46.399678+00:00 -Long among serve wear west.,http://hughes-walters.biz/,3,Business,2011-04-11 
06:41:58.522309+00:00 -Age resource increase attack customer along director.,https://www.miles-perkins.com/,66,Technology,1991-06-01 11:16:13.652275+00:00 -Movie trip season example.,https://davenport.com/,43,Technology,1987-08-20 13:39:35.918484+00:00 -Hotel practice later knowledge whose.,https://www.rodriguez.com/,96,Technology,1980-10-16 07:05:13.097975+00:00 -Save fear ready Democrat order field less.,http://dixon.com/,28,Technology,2016-05-21 13:00:16.133937+00:00 -Matter result result leader hospital.,http://hoffman-mitchell.com/,10,Business,1975-10-14 05:47:53.773611+00:00 -System people tax.,https://www.wallace-daniel.com/,13,Sports,1989-11-04 19:04:54.242297+00:00 -Wife score health evidence president.,https://www.turner.info/,27,Sports,2019-07-28 08:20:21.970164+00:00 -They usually company.,http://www.young.net/,35,Health,1971-03-21 05:46:23.489045+00:00 -Look rule check assume question air market.,https://dudley.com/,85,Sports,2021-09-23 11:43:09.532791+00:00 -Report rest dark environment provide.,http://www.west.org/,56,Business,2021-05-03 01:53:56.469708+00:00 -Social resource score expert because happen star.,http://winters.com/,95,Health,1991-12-11 20:27:23.034379+00:00 -I level girl factor sport seem manager my.,https://guzman.com/,46,Science,1994-04-21 17:35:29.666113+00:00 -Technology clearly amount media teacher person out.,http://www.summers.com/,54,Sports,2023-05-28 10:21:33.779189+00:00 -Among billion about various professor clearly dinner.,https://nguyen.net/,51,Business,2005-06-01 19:10:49.534743+00:00 -Despite go impact seem blood policy.,https://www.woodard.net/,14,Science,1977-10-13 17:05:07.295616+00:00 -Reason fill sense field.,http://www.johnson.com/,16,Technology,1980-01-06 03:54:14.915436+00:00 -Attack laugh budget war.,https://www.harper.com/,55,Business,2001-10-17 10:42:21.462998+00:00 -Play order quickly with matter goal eight.,https://carrillo.com/,88,Science,2023-08-17 20:20:41.547866+00:00 -Although maintain town little myself bar.,http://curry-allen.info/,5,Health,1986-09-14 23:35:23.359953+00:00 -Describe improve act campaign television sit very.,https://www.white.net/,84,Sports,1983-08-27 05:00:26.437459+00:00 -Recently make total fall.,http://www.burke-simmons.com/,10,Technology,1995-12-30 15:08:59.599346+00:00 -Accept simply southern town significant.,http://walker.org/,64,Technology,2008-06-27 00:12:18.357435+00:00 -Big cause science water.,http://richardson-hale.com/,91,Business,2018-05-26 16:32:13.524719+00:00 -Play blood list everything newspaper institution.,http://www.rich-ortega.info/,52,Business,2019-07-25 17:59:50.231660+00:00 -Building class now professor.,https://baker.com/,75,Science,1976-03-31 08:45:25.010773+00:00 -Discussion challenge card.,https://www.brown-french.com/,68,Health,2001-07-28 08:29:20.037380+00:00 -Field specific floor.,http://lopez.info/,62,Technology,1975-05-05 11:26:19.548846+00:00 -Need phone teach audience project tax.,http://www.palmer-archer.org/,22,Technology,2003-10-20 23:48:32.311641+00:00 -Style gun different energy arrive it.,https://lee.com/,51,Science,2002-09-12 05:02:20.078627+00:00 -Security camera past so thousand.,http://www.boyd.org/,4,Science,1973-07-30 06:39:21.574304+00:00 -Relate since significant always.,https://marshall-roman.com/,39,Business,2002-08-20 17:57:00.284008+00:00 -Talk like maintain serious style.,http://davis-burns.com/,69,Business,2003-03-26 18:12:32.186696+00:00 -Edge environmental even eye.,http://solomon-knight.com/,40,Technology,2014-07-09 17:38:05.851383+00:00 -Put 
everything our prove could party skin.,https://www.parks.com/,60,Business,1998-01-16 06:48:34.189294+00:00 -Right then police day.,https://www.pacheco.com/,73,Science,1998-12-22 21:11:11.157220+00:00 -Effort author buy heavy.,https://www.lopez.info/,59,Business,2001-06-19 04:20:09.554302+00:00 -Memory inside enter word physical.,https://www.kim-edwards.com/,24,Sports,1989-06-25 13:31:43.988630+00:00 -American audience blue.,https://www.frost.biz/,27,Sports,2018-01-21 02:12:39.687661+00:00 -Art born with rest exist drive determine.,https://hernandez.org/,41,Technology,1979-04-07 23:50:42.798412+00:00 -Data pattern west before science without.,https://davis.com/,31,Science,2002-11-13 15:34:01.566207+00:00 -Truth evidence about provide.,https://lewis-garcia.com/,53,Science,1993-11-10 10:39:15.958577+00:00 -Sure seek age share day pull.,https://smith-romero.biz/,57,Business,1981-01-29 04:41:58.625023+00:00 -Region over recognize help.,https://www.edwards.com/,32,Health,2018-06-10 17:12:35.061460+00:00 -Over like agency very during recently.,https://www.armstrong-ward.com/,63,Science,2009-01-16 20:03:38.265419+00:00 -Become effort direction hand young long.,http://www.newman.com/,49,Health,1972-08-02 19:31:58.878820+00:00 -Bed trouble begin receive anyone.,http://hernandez-thomas.biz/,36,Technology,1982-05-09 07:30:48.882143+00:00 -Coach at the per of.,http://harrison.com/,26,Business,2008-06-01 10:19:41.782366+00:00 -Significant especially situation move.,https://white.com/,42,Technology,2011-04-27 06:55:01.540181+00:00 -Spend may thus hour cold light suffer.,http://logan.com/,92,Technology,1978-09-27 21:14:52.559518+00:00 -Senior attack help a leave sometimes quality house.,https://harding.biz/,97,Health,1998-03-23 09:44:14.865085+00:00 -Cold teach call already available yes.,https://www.strong.com/,41,Science,1980-09-17 22:37:00.964670+00:00 -Democratic simple seek.,https://www.leon.com/,57,Health,1996-03-30 19:12:12.071226+00:00 -Require he daughter candidate where early certainly.,http://brown-marshall.biz/,84,Science,1974-03-19 10:35:26.846766+00:00 -Great mention local value than.,https://www.rivera-collins.net/,22,Sports,2023-06-28 20:28:57.428071+00:00 -Season no art meeting.,https://www.powell-lindsey.info/,45,Business,1987-08-10 11:00:35.445900+00:00 -Anything military but just maintain shake.,http://www.fitzpatrick.com/,85,Science,2009-06-09 00:37:53.169098+00:00 -Century several cultural pattern every.,http://www.turner.com/,54,Business,1980-06-10 17:17:09.256330+00:00 -Interview nothing wrong decision.,http://www.baker.com/,29,Science,2019-05-05 05:09:12.398985+00:00 -Understand management bag least.,https://sosa-scott.net/,71,Health,2004-01-19 06:05:37.945497+00:00 -Money property structure operation radio south.,http://www.hamilton.com/,66,Health,1996-11-24 01:11:44.690600+00:00 -Anything option soon election station never.,http://www.stein-howe.net/,65,Health,1992-03-31 23:37:16.411826+00:00 -Information former ball class loss up.,http://graham.com/,54,Science,2022-10-01 06:15:38.687851+00:00 -Visit brother dog these man when.,http://atkinson-mitchell.org/,22,Health,1978-08-01 21:03:52.205831+00:00 -Order table beyond bank scene change.,https://michael.com/,7,Sports,1995-05-29 05:37:34.172083+00:00 -Card worker happen.,https://www.martin.com/,13,Sports,2023-09-19 21:01:46.219356+00:00 -Write pass start court.,http://rodriguez-watkins.org/,68,Sports,2014-11-06 05:00:42.384827+00:00 -Catch left system church simply.,http://walker.com/,86,Technology,2007-06-20 
10:39:15.626771+00:00 -Grow meeting family relationship husband southern dog.,https://allen.com/,5,Sports,1973-03-02 21:25:29.281039+00:00 -Challenge your call reveal.,https://flores.biz/,16,Business,2009-04-29 06:36:20.322812+00:00 -Visit decide list almost choose.,http://williams.biz/,24,Science,1994-12-28 12:28:55.711201+00:00 -Situation television last staff seat religious.,https://www.booth.com/,54,Sports,1996-04-02 03:08:51.454199+00:00 -Challenge this drive page whatever trade.,http://townsend.com/,92,Health,2013-02-22 01:18:00.555012+00:00 -Front bit sign wide cut challenge.,https://chaney.biz/,99,Sports,1994-07-13 02:54:41.347352+00:00 -Natural much sense and forward imagine phone social.,https://sloan-lynch.com/,41,Business,1982-01-07 10:25:14.015985+00:00 -Feel present although director toward.,https://smith-perez.com/,58,Technology,1991-05-28 12:07:41.586025+00:00 -Answer away sort movement tonight vote.,http://alvarado.net/,85,Business,1985-07-03 16:47:14.809837+00:00 -Talk understand home rise.,http://www.cohen.com/,83,Technology,2021-01-13 10:25:12.264457+00:00 -Between use room.,http://scott.com/,47,Sports,2011-04-02 12:52:07.313561+00:00 -Finally my nature attention work.,http://www.jones.com/,15,Science,1988-03-13 18:48:14.462818+00:00 -She single let business property everything bar.,http://hughes.net/,33,Sports,2019-02-11 05:29:56.041905+00:00 -Source soldier beat as certainly civil spring movement.,https://peterson.biz/,14,Business,2011-10-31 14:43:06.269494+00:00 -Defense couple point similar security at.,http://townsend.com/,6,Sports,1994-06-02 21:18:48.815648+00:00 -Number admit professor.,https://brady-smith.com/,60,Sports,1977-03-25 08:29:30.072066+00:00 -Organization no bring career.,https://weber.com/,32,Health,1998-12-16 10:36:13.955907+00:00 -Fall teach area feel who carry.,https://ramirez.com/,68,Health,1974-01-19 03:09:04.354564+00:00 -School imagine ago expect continue.,http://www.morrison.com/,34,Health,1987-03-01 07:58:17.370090+00:00 -Consumer defense song discuss.,https://fischer.net/,74,Business,1986-01-09 17:12:22.824827+00:00 -Price beyond idea us find there behavior.,https://williams.org/,1,Sports,2018-01-22 21:08:30.625340+00:00 -Light guess my.,https://www.ayers.com/,22,Technology,2004-12-18 06:17:04.523412+00:00 -Red billion speech like rock call scene.,http://maldonado.org/,94,Technology,1996-07-24 12:46:09.958239+00:00 -Arrive decision project court month.,https://www.beltran-jenkins.org/,37,Health,1983-06-06 00:28:25.329956+00:00 -Religious effort painting ability.,http://lee.com/,83,Business,1976-06-20 15:26:45.968624+00:00 -Car American your seat rule.,https://www.hill.org/,1,Technology,2008-09-02 03:09:04.974157+00:00 -Including game language.,http://www.nguyen-jenkins.net/,88,Health,1993-11-27 20:33:04.818995+00:00 -Tax certainly home could program.,http://avery-kim.com/,1,Science,1994-12-30 01:36:44.256892+00:00 -Sit street security modern.,http://keith.com/,36,Technology,1992-12-22 23:24:05.312551+00:00 -List several assume.,https://newman-cole.com/,62,Science,1984-07-15 20:34:44.854984+00:00 -Determine toward admit doctor magazine despite we.,http://www.griffith-boyd.com/,35,Business,2013-02-07 08:07:05.119744+00:00 -After note those drop executive special.,http://smith.net/,54,Business,1988-05-26 22:52:54.173925+00:00 -Cover board fill share economy source only.,https://james.com/,32,Health,1972-06-20 23:32:40.515950+00:00 -Ground serve mouth manage issue none.,http://www.huang.com/,60,Science,1998-11-05 01:42:57.040398+00:00 -Bill 
wrong nature responsibility.,http://www.wood.com/,2,Business,1992-01-22 22:13:20.476908+00:00 -Poor threat throw behavior type.,http://www.moody-herrera.com/,6,Science,2007-02-24 00:29:57.618491+00:00 -Cold serious rock unit majority yeah somebody.,https://www.gordon.org/,36,Sports,1993-01-10 18:00:32.483584+00:00 -Various send news clearly school wonder.,http://jackson.com/,95,Sports,2009-06-28 12:42:35.728091+00:00 diff --git a/data/raw/f_663_data_simon/test_data1_result.csv b/data/raw/f_663_data_simon/test_data1_result.csv deleted file mode 100644 index b661de0c..00000000 --- a/data/raw/f_663_data_simon/test_data1_result.csv +++ /dev/null @@ -1,6 +0,0 @@ -category,count,mean,min,max -Business,215,11.325581395348838,0,23 -Health,209,12.191387559808613,0,23 -Science,189,11.227513227513228,0,23 -Sports,171,11.403508771929825,0,23 -Technology,216,11.61574074074074,0,23 diff --git a/data/raw/f_663_data_simon/test_data2.csv b/data/raw/f_663_data_simon/test_data2.csv deleted file mode 100644 index f025108e..00000000 --- a/data/raw/f_663_data_simon/test_data2.csv +++ /dev/null @@ -1,101 +0,0 @@ -title,title_url,id,category,published_time -Especially more glass high you lot poor.,https://www.smith.com/,21,Health,2018-11-05 20:14:19.817807+00:00 -Office current message standard.,https://www.ellison.net/,29,Sports,2023-05-07 17:59:37.706182+00:00 -Water both institution career fine perform.,http://www.jackson.biz/,86,Business,1995-04-27 09:18:42.447986+00:00 -His air risk whose wish professional ground guy.,https://king.com/,48,Business,2000-11-27 07:23:53.398917+00:00 -Conference herself teacher allow agent put analysis.,http://www.walker.biz/,36,Science,1988-07-22 17:21:58.796162+00:00 -Method happy fund.,https://benjamin.com/,60,Business,1989-08-20 20:30:16.273852+00:00 -Short detail parent way trip.,https://www.norton.com/,32,Business,1976-04-04 11:28:35.007098+00:00 -Your me agency follow skin story.,http://www.shaw.info/,95,Technology,1994-03-13 22:06:32.001391+00:00 -Their everything want middle receive relate degree expect.,https://www.smith.com/,54,Technology,1983-01-03 10:26:58.504850+00:00 -Line arrive now responsibility view election.,http://chapman-santana.com/,75,Business,1988-09-25 16:49:46.977692+00:00 -Point his claim read choice air.,http://www.nichols-mccormick.biz/,13,Health,1980-03-09 15:46:57.828206+00:00 -Value ready must performance by.,https://www.russell.com/,88,Health,1977-08-24 13:29:06.986003+00:00 -Result financial conference question.,https://johnson.org/,57,Science,1979-09-03 05:26:16.829788+00:00 -Game draw force want account growth evidence cut.,https://black.net/,90,Health,1978-10-02 23:54:49.492863+00:00 -Home may relationship least size any sign.,https://nunez.net/,74,Health,1988-11-18 21:04:29.782458+00:00 -Focus matter guess.,https://www.woods.com/,33,Science,1972-06-23 02:09:02.657130+00:00 -Experience very out home their today.,https://www.green.org/,49,Science,1992-11-07 08:43:09.250312+00:00 -Fund shoulder manage vote spring.,http://www.houston.com/,67,Sports,2016-10-26 19:06:09.249624+00:00 -Woman trip side teacher find their.,http://smith.com/,21,Technology,2008-08-05 10:42:03.253761+00:00 -Until word similar national against.,http://jimenez.com/,84,Technology,2013-03-03 22:02:42.945204+00:00 -Main fall green style table throughout agreement.,https://www.vasquez.com/,88,Science,1999-08-03 14:24:39.963885+00:00 -Heart author true give discuss institution south student.,http://www.parks.org/,93,Sports,2001-12-25 07:16:51.666709+00:00 -Unit little deal 
series interest myself firm drop.,http://hatfield.com/,19,Health,1976-08-08 11:28:36.969078+00:00 -Outside protect actually direction lot room stay.,https://james.info/,44,Technology,2017-12-14 22:44:57.662200+00:00 -People ask game entire north stuff program.,https://cooper-ewing.com/,95,Business,2004-01-25 22:34:15.025449+00:00 -Guy choose perform everybody.,https://kelly-bright.com/,65,Business,1994-10-18 22:17:05.064877+00:00 -Person threat wait per.,https://www.rogers-frey.biz/,6,Business,1987-09-19 21:21:40.408606+00:00 -Available public big economic special writer purpose.,http://campbell.com/,25,Technology,1984-10-08 17:14:55.445059+00:00 -Pm operation long where.,https://www.myers-marshall.com/,35,Technology,1982-11-24 05:46:35.858747+00:00 -Six machine process resource support fund human.,http://www.zamora.biz/,87,Health,2018-08-28 09:30:31.519431+00:00 -Ready minute officer conference.,https://thomas.com/,68,Health,1983-03-31 13:48:07.109136+00:00 -Business story here make over.,http://www.james.com/,68,Health,2012-05-05 15:18:55.413240+00:00 -Treat she necessary social.,https://www.payne.net/,24,Business,2015-04-02 01:48:30.186895+00:00 -Job again management grow.,https://www.payne.info/,72,Business,1991-07-19 17:42:51.323272+00:00 -Suggest head too.,http://rose.com/,40,Technology,1986-02-25 21:04:59.550855+00:00 -Son section modern since quality.,https://mitchell.com/,15,Technology,1973-06-14 21:25:34.201799+00:00 -Care whom mouth computer.,https://www.smith-thomas.info/,73,Science,2007-10-20 13:33:39.985071+00:00 -Station seem experience sense.,https://www.best.com/,46,Health,1975-01-26 07:38:37.187591+00:00 -Head green always few the perform never.,http://wiggins.com/,41,Technology,2020-03-29 22:22:22.655211+00:00 -This beautiful when participant sea.,https://kim.com/,44,Sports,2004-01-16 20:39:27.377096+00:00 -Produce lose data despite attack behind.,https://robertson.info/,69,Science,2008-01-30 17:52:13.088017+00:00 -Suffer question opportunity collection likely.,https://www.harris.com/,23,Health,2003-08-27 12:27:47.787625+00:00 -Like type late bed pretty traditional.,http://www.ray.com/,81,Business,2005-09-27 13:29:07.275489+00:00 -Do room machine.,http://www.mathews.org/,89,Business,1999-12-30 01:32:16.684178+00:00 -Up wide soldier wife.,https://www.heath.com/,71,Health,2019-05-18 13:46:52.343151+00:00 -There prove interview among story call customer.,https://www.brown.net/,19,Science,1986-02-13 05:04:49.518278+00:00 -Become prepare remember style staff.,http://bauer.com/,43,Sports,1978-02-20 07:47:34.475615+00:00 -Act service he fight.,https://www.richards.com/,90,Science,2014-05-31 04:57:34.527952+00:00 -He evidence gun board foreign discover.,https://woods-smith.com/,52,Sports,1978-07-12 14:46:48.924281+00:00 -Usually quickly tell trouble hotel recent.,https://www.zamora.org/,58,Technology,2007-01-27 10:45:53.166559+00:00 -Fine fear road another them firm hotel.,https://www.lara.com/,11,Business,1987-09-29 13:52:22.957813+00:00 -Accept left green finally gas blue.,https://www.lee-perry.com/,100,Business,2013-11-18 13:32:23.004526+00:00 -Face anything somebody prepare remain.,http://gates.biz/,80,Sports,1974-11-25 05:42:11.551602+00:00 -Two nice style continue.,http://simmons-lopez.com/,40,Science,2010-09-16 12:49:34.552975+00:00 -Agreement arm message drive.,https://www.martin-fisher.biz/,10,Health,2016-05-09 15:49:44.793424+00:00 -Main suffer beyond.,http://stuart.net/,62,Science,2016-01-01 11:43:38.473487+00:00 -During live car cultural account letter will 
commercial.,http://fox-acosta.com/,27,Technology,2014-06-27 10:34:41.380623+00:00 -Against manage speech population the worry recent production.,http://www.stafford.com/,88,Technology,2019-11-05 18:51:28.380276+00:00 -Again until billion we.,https://campbell.biz/,85,Sports,2007-06-24 05:51:56.119349+00:00 -Business product mission.,http://www.duran.com/,4,Sports,1993-04-23 22:25:48.794972+00:00 -Performance ahead language sometimes his compare.,http://www.garcia-caldwell.net/,36,Science,2000-01-28 12:56:10.749556+00:00 -Debate each under debate major lawyer.,http://www.wilson.com/,56,Science,1989-12-29 20:49:00.836468+00:00 -Himself story market protect argue develop entire.,https://shelton.com/,1,Technology,1982-12-06 09:18:15.740752+00:00 -Information pass by also if agree use.,https://hernandez.org/,92,Science,2010-10-17 11:40:05.671637+00:00 -Live read between senior compare opportunity performance summer.,https://www.ochoa.org/,77,Sports,2022-09-11 17:00:14.063981+00:00 -Strong gun should trouble.,http://shelton.com/,100,Science,2014-06-10 17:43:20.081320+00:00 -Response worry center reality direction social second shoulder.,https://www.christian-rodriguez.org/,13,Health,1993-07-28 22:07:50.959262+00:00 -Put position food indicate to respond community.,http://www.parker-yoder.info/,33,Science,1976-10-30 15:40:42.455365+00:00 -Their ten money million.,http://www.howell.org/,65,Sports,1994-10-11 07:01:27.164112+00:00 -Health season out space.,http://fuentes.biz/,54,Science,2001-01-22 13:23:41.542720+00:00 -Happen remember pressure second girl themselves.,http://andrews.info/,4,Health,1998-10-03 03:14:35.922802+00:00 -Individual my weight sort.,http://romero.com/,18,Health,2008-08-21 07:16:00.503624+00:00 -Meet account western stand tend.,https://rodgers-lewis.biz/,62,Health,2018-08-05 11:31:07.409242+00:00 -Partner detail blood others price night.,http://jordan.com/,66,Technology,1995-04-22 15:45:01.207012+00:00 -Number himself surface unit rock let.,https://craig.biz/,50,Health,2003-09-27 23:54:21.896589+00:00 -Bag else close toward direction.,http://www.savage-roman.net/,36,Business,1971-03-25 23:50:04.394541+00:00 -Star guess could.,http://romero.com/,45,Science,1994-08-18 00:17:57.689395+00:00 -Our your its.,https://baird.info/,70,Health,2013-11-10 12:39:00.545719+00:00 -Measure impact radio candidate stay natural.,http://www.hansen-brown.net/,39,Technology,1975-10-26 01:12:36.474291+00:00 -Citizen foreign show often card.,https://www.gutierrez.org/,76,Health,2018-09-11 12:33:27.493868+00:00 -Stop detail return month.,http://hawkins-jackson.com/,50,Sports,1996-11-21 11:48:42.173289+00:00 -Mother involve sing.,http://www.hart-caldwell.com/,40,Health,2009-12-23 16:42:43.288814+00:00 -Factor product night.,https://davis-huff.com/,84,Health,1975-06-09 20:34:55.536355+00:00 -News manager term bag.,https://leon.com/,71,Health,2004-09-09 11:47:59.889730+00:00 -Former ask today by yeah.,https://yates-bennett.com/,23,Science,1971-04-21 00:55:51.750427+00:00 -These response do guess.,http://www.chavez-alexander.com/,34,Science,1989-08-31 11:30:46.512088+00:00 -Theory tax Congress fall respond customer.,https://nielsen.com/,29,Business,1983-02-07 17:10:06.202605+00:00 -Indicate month big.,http://www.hendrix-woods.org/,42,Science,1997-02-10 15:45:10.075698+00:00 -Summer might strategy draw site.,http://www.lopez.biz/,1,Science,1976-12-24 09:29:22.475304+00:00 -Almost hair cut trip stand carry number side.,https://www.silva.com/,40,Sports,1975-12-05 14:35:12.577956+00:00 -Nation white family 
local.,http://hood.com/,92,Health,1985-11-17 11:01:48.842985+00:00
-Successful well perform customer claim.,https://thomas.biz/,95,Technology,2021-02-18 11:01:05.924323+00:00
-What common home hour candidate century fire near.,http://www.benton.biz/,95,Science,1978-12-09 06:31:31.289598+00:00
-Meeting possible set product ability.,http://www.payne-palmer.com/,100,Science,2014-07-22 08:44:11.344851+00:00
-Center ten measure exactly consumer.,http://www.garcia-young.com/,100,Science,1977-03-01 05:21:53.388182+00:00
-Time sense position red.,https://www.sanford.com/,7,Business,2013-06-30 20:46:36.903427+00:00
-Material range beautiful crime my.,https://www.mcdaniel.com/,43,Technology,1971-03-20 02:12:34.867613+00:00
-Stuff compare which he.,https://www.duran.com/,29,Technology,1993-11-23 14:28:07.366160+00:00
-Position everyone go specific.,http://www.durham-solis.net/,95,Science,1973-08-16 14:31:34.857509+00:00
-Yourself begin end general boy clearly.,https://alexander.com/,68,Sports,2009-11-02 16:15:51.303936+00:00
diff --git a/data/raw/f_663_data_simon/test_data2_result.csv b/data/raw/f_663_data_simon/test_data2_result.csv
deleted file mode 100644
index 013341d9..00000000
--- a/data/raw/f_663_data_simon/test_data2_result.csv
+++ /dev/null
@@ -1,6 +0,0 @@
-category,count,mean,min,max
-Business,17,14.470588235294118,0,23
-Health,24,12.5,0,22
-Science,26,11.538461538461538,1,21
-Sports,14,12.571428571428571,0,21
-Technology,19,13.736842105263158,0,23
diff --git a/data/raw/f_663_data_simon/test_data3.csv b/data/raw/f_663_data_simon/test_data3.csv
deleted file mode 100644
index 3216b527..00000000
--- a/data/raw/f_663_data_simon/test_data3.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-title,title_url,id,category,published_time
-Environment shoulder entire.,http://hart.com/,44,Business,1987-12-31 06:35:55.040786+00:00
-Clearly town similar these help.,http://west.com/,17,Sports,1985-09-25 07:00:44.632638+00:00
-Of enter study shoulder gas soon.,http://sanders.net/,56,Business,2004-06-01 18:02:06.574362+00:00
-Three main protect hear all senior consider star.,https://rogers.biz/,10,Science,1990-11-25 21:30:30.192707+00:00
diff --git a/data/raw/f_663_data_simon/test_data3_result.csv b/data/raw/f_663_data_simon/test_data3_result.csv
deleted file mode 100644
index 97862a0e..00000000
--- a/data/raw/f_663_data_simon/test_data3_result.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-category,count,mean,min,max
-Business,2,8.0,2,14
-Science,1,5.0,5,5
-Sports,1,15.0,15,15
diff --git a/data/raw/f_663_simon.py b/data/raw/f_663_simon.py
deleted file mode 100644
index 88c89453..00000000
--- a/data/raw/f_663_simon.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import pandas as pd
-import pytz
-
-def f_663(articles, timezone):
-    """
-    Analyze the publication times of a list of articles:
-    1) Convert 'published_time' to a specified timezone
-    2) Group articles by 'category'
-    3) For each category, calculate the count, mean, min, max publication times only considering the hour.
-
-    Parameters:
-    articles (list): A list of dictionaries where each dictionary represents
-    an article with keys 'title', 'title_url', 'id', 'category', and 'published_time' (in UTC).
-    timezone (str): The string representation of the timezone to which the 'published_time' should be converted.
-
-    Returns:
-    DataFrame: A pandas DataFrame with the count, mean, min, max publication hour for each category.
-               The category is the index of the DataFrame.
-
-    Raises:
-    ValueError: If dictionary keys do not match the requirements.
-    TypeError: If articles is not a list of dictionaries.
-    ValueError: If an empty list is passed as articles.
-
-    Requirements:
-    - pandas
-    - pytz
-
-    Example:
-    >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}]
-    >>> analysis_df = f_663(articles, 'America/New_York')
-    >>> print(analysis_df)
-                count  mean  min  max
-    category
-    Health          1   3.0    3    3
-    Sports          1  19.0   19   19
-    Technology      1   8.0    8    8
-
-    >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)},
-    ...             {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}]
-    >>> analysis_df = f_663(articles, 'America/New_York')
-    >>> print(analysis_df)
-                count  mean  min  max
-    category
-    Health          1   3.0    3    3
-    Sports          1  19.0   19   19
-    Technology      1   8.0    8    8
-
-    >>> articles = [
-    ...    {'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': '09:01:04.403278+00:00'},
-    ...    {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': '02:03:04.403278+00:00'},
-    ...    {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': '21:11:01.403278+00:00'},
-    ...    {'title': 'newsies', 'title_url': 'newsies.news', 'id': 21, 'category': 'Technology', 'published_time': '4:25:12.403278+00:00'},
-    ...    {'title': 'ORF', 'title_url': 'orf.at', 'id': 44, 'category': 'Health', 'published_time': '03:04:03.403278+00:00'},
-    ...    {'title': 'ARD', 'title_url': 'ard.com', 'id': 61, 'category': 'Health', 'published_time': '11:41:12.403278+00:00'}]
-    >>> analysis_df = f_663(articles, 'America/New_York')
-    >>> print(analysis_df)
-              count       mean  min  max
-    category
-    Health        3  14.666667    6   22
-    Sports        1  21.000000   21   21
-    Technology    2  13.500000    4   23
-    """
-
-    if not isinstance(articles, list):
-        raise TypeError("articles should be a list of dictionaries.")
-
-    if not all(isinstance(item, dict) for item in articles):
-        raise TypeError("articles should be a list of dictionaries.")
-
-    if len(articles) == 0:
-        raise ValueError("input articles list should contain at least one article.")
-
-    if any(not sorted(dic.keys()) == ['category', 'id', 'published_time', 'title', 'title_url'] for dic in articles):
-        raise ValueError("input dictionaries must contain the following keys: 'category', 'id', 'title', 'title_url', 'published_time'")
-
-    tz = pytz.timezone(timezone)
-    for article in articles:
-        article['published_time'] = pd.to_datetime(article['published_time']).astimezone(tz)
-
-    df = pd.DataFrame(articles)
-    df['published_time'] = df['published_time'].dt.hour
-
-    analysis_df = df.groupby('category')['published_time'].agg(['count', 'mean', 'min', 'max'])
-
-    return analysis_df
-
-import unittest
-import pytz
-import pandas as pd
-from faker import Faker
-import csv
-
-fake = Faker()
-
-def run_tests():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(TestCases))
-    runner = unittest.TextTestRunner()
-    runner.run(suite)
-
-class TestCases(unittest.TestCase):
-
-    def generate_random_articles(self, n, seed):
-        fake.seed_instance(seed=seed)
-        categories = ['Sports', 'Technology', 'Health', 'Science', 'Business']
-        articles = []
-        for _ in range(n):
-            article = {
-                'title': fake.sentence(),
-                'title_url': fake.url(),
-                'id': fake.random_int(min=1, max=100),
-                'category': fake.random_element(categories),
-                'published_time': fake.date_time(tzinfo=pytz.UTC)
-            }
-            articles.append(article)
-        return articles
-
-    def test_wrong_input(self):
-        self.assertRaises(Exception, f_663, [], 'UTC')
-        self.assertRaises(Exception, f_663, [{}], 'UTC')
-        self.assertRaises(Exception, f_663, [{'test': 2}], 'UTC')
-
-    def test_case_1(self):
-        'big dataset'
-        with open('f_663_data_simon/test_data1.csv', 'r') as csv_file:
-            reader = csv.DictReader(csv_file)
-            articles = list(reader)
-            analysis_df = f_663(articles, 'America/New_York')
-
-        excepted_df = pd.read_csv('f_663_data_simon/test_data1_result.csv', index_col='category')
-
-        self.assertTrue(pd.testing.assert_frame_equal(analysis_df, excepted_df) is None)
-
-    def test_case_2(self):
-        'medium dataset'
-        with open('f_663_data_simon/test_data2.csv', 'r') as csv_file:
-            reader = csv.DictReader(csv_file)
-            articles = list(reader)
-            analysis_df = f_663(articles, 'Europe/Vienna')
-
-        excepted_df = pd.read_csv('f_663_data_simon/test_data2_result.csv', index_col='category')
-
-        self.assertTrue(pd.testing.assert_frame_equal(analysis_df, excepted_df) is None)
-
-    def test_case_3(self):
-        'small dataset'
-        with open('f_663_data_simon/test_data3.csv', 'r') as csv_file:
-            reader = csv.DictReader(csv_file)
-            articles = list(reader)
-            analysis_df = f_663(articles, 'Asia/Shanghai')
-
-        excepted_df = pd.read_csv('f_663_data_simon/test_data3_result.csv', index_col='category')
-        self.assertTrue(pd.testing.assert_frame_equal(analysis_df, excepted_df) is None)
-
-    def test_case_5(self):
-        'check general structure'
-        articles = self.generate_random_articles(534, seed=11)
-        analysis_df = f_663(articles, 'Africa/Cairo')
-        self.assertTrue(set(analysis_df.index).issubset(['Sports', 'Technology', 'Health', 'Science', 'Business']))
-        self.assertIn('count', analysis_df.columns)
-        self.assertIn('mean', analysis_df.columns)
-        self.assertIn('min', analysis_df.columns)
-        self.assertIn('max', analysis_df.columns)
-if __name__ == "__main__":
-    run_tests()
\ No newline at end of file
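Both the deleted f_663 above and its replacement below hinge on the same pandas idiom: shift each timezone-aware timestamp with astimezone, reduce it to its local hour, and aggregate per category with groupby(...).agg(...). A minimal self-contained sketch of that idiom, with illustrative sample rows (the names rows and tz here are not part of the patch):

    import pandas as pd
    import pytz

    # Illustrative rows shaped like the CSV fixtures above.
    rows = [
        {'category': 'Technology', 'published_time': '2023-06-15 12:00:00+00:00'},
        {'category': 'Technology', 'published_time': '2023-06-15 18:30:00+00:00'},
        {'category': 'Health', 'published_time': '2023-06-17 07:00:00+00:00'},
    ]

    tz = pytz.timezone('America/New_York')
    for row in rows:
        # Parse the UTC timestamp, then shift the wall-clock time into the target zone.
        row['published_time'] = pd.to_datetime(row['published_time']).astimezone(tz)

    df = pd.DataFrame(rows)
    df['published_time'] = df['published_time'].dt.hour  # keep only the local hour (0-23)

    # count/mean/min/max of the publication hour, per category.
    print(df.groupby('category')['published_time'].agg(['count', 'mean', 'min', 'max']))

With these June timestamps, 12:00 and 18:30 UTC land at hours 8 and 14 under EDT (UTC-4), so the Technology row prints count 2, mean 11.0, min 8, max 14; this is the same arithmetic the docstring examples show.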
diff --git a/data/raw/f_663_simon_chien_edit.py b/data/raw/f_663_simon_chien_edit.py
new file mode 100644
index 00000000..ed1e96ec
--- /dev/null
+++ b/data/raw/f_663_simon_chien_edit.py
@@ -0,0 +1,167 @@
+import pandas as pd
+import pytz
+
+
+def f_663(articles, timezone):
+    """
+    Analyze the publication times of a list of articles:
+    1) Convert 'published_time' to a specified timezone
+    2) Group articles by 'category'
+    3) For each category, calculate the count, mean, min, max publication times only considering the hour.
+
+    Parameters:
+    articles (list): A list of dictionaries where each dictionary represents
+    an article with keys 'title', 'title_url', 'id', 'category', and 'published_time' (in UTC).
+    timezone (str): The string representation of the timezone to which the 'published_time' should be converted.
+
+    Returns:
+    DataFrame: A pandas DataFrame with the count, mean, min, max publication hour for each category.
+               The category is the index of the DataFrame.
+
+    Raises:
+    ValueError: If dictionary keys do not match the requirements.
+    TypeError: If articles is not a list of dictionaries.
+    ValueError: If an empty list is passed as articles.
+
+    Requirements:
+    - pandas
+    - pytz
+
+    Example:
+    >>> articles = [{'title': 'Apple News', 'title_url': 'Apple_News', 'id': 2, 'category': 'Technology', 'published_time': datetime(2023, 6, 15, 12, 0, 0, tzinfo=pytz.UTC)},
+    ...             {'title': 'New York Times', 'title_url': 'New_York_Times', 'id': 4, 'category': 'Sports', 'published_time': datetime(2023, 6, 16, 23, 0, 0, tzinfo=pytz.UTC)},
+    ...             {'title': 'USA Today', 'title_url': 'USA_Today', 'id': 6, 'category': 'Health', 'published_time': datetime(2023, 6, 17, 7, 0, 0, tzinfo=pytz.UTC)}]
+    >>> analysis_df = f_663(articles, 'America/New_York')
+    >>> print(analysis_df)
+                count  mean  min  max
+    category
+    Health          1   3.0    3    3
+    Sports          1  19.0   19   19
+    Technology      1   8.0    8    8
+    """
+
+    if not isinstance(articles, list):
+        raise TypeError("articles should be a list of dictionaries.")
+
+    if not all(isinstance(item, dict) for item in articles):
+        raise TypeError("articles should be a list of dictionaries.")
+
+    if len(articles) == 0:
+        raise ValueError("input articles list should contain at least one article.")
+
+    if any(not sorted(dic.keys()) == ['category', 'id', 'published_time', 'title', 'title_url'] for dic in articles):
+        raise ValueError(
+            "input dictionaries must contain the following keys: 'category', 'id', 'title', 'title_url', 'published_time'")
+
+    tz = pytz.timezone(timezone)
+    for article in articles:
+        article['published_time'] = pd.to_datetime(article['published_time']).astimezone(tz)
+
+    df = pd.DataFrame(articles)
+    df['published_time'] = df['published_time'].dt.hour
+
+    analysis_df = df.groupby('category')['published_time'].agg(['count', 'mean', 'min', 'max'])
+
+    return analysis_df
+
+
+import unittest
+import pandas as pd
+import pytz
+from datetime import datetime
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+class TestCases(unittest.TestCase):
+
+    def setUp(self):
+        self.articles = [
+            {'title': 'Apple News', 'title_url': 'apple.com/news', 'id': 1, 'category': 'Technology',
+             'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.UTC)},
+            {'title': 'Sports Update', 'title_url': 'sports.com/update', 'id': 2, 'category': 'Sports',
+             'published_time': datetime(2023, 1, 1, 15, 0, tzinfo=pytz.UTC)},
+            {'title': 'Health Today', 'title_url': 'health.com/today', 'id': 3, 'category': 'Health',
+             'published_time': datetime(2023, 1, 1, 8, 0, tzinfo=pytz.UTC)}
+        ]
+
+    def test_empty_articles_list(self):
+        # Test handling of empty list
+        with self.assertRaises(ValueError):
+            f_663([], 'America/New_York')
+
+    def test_invalid_article_format(self):
+        # Test handling of improperly formatted articles list
+        with self.assertRaises(ValueError):
+            f_663([{'wrong_key': 'wrong_value'}], 'America/New_York')
+
+    def test_conversion_and_grouping(self):
+        timezone = 'America/New_York'
+        result_df = f_663(self.articles, timezone)
+
+        expected_data = {
+            'count': {'Health': 1, 'Sports': 1, 'Technology': 1},
+            'mean': {'Health': 3.0, 'Sports': 10.0, 'Technology': 7.0},
+            'min': {'Health': 3, 'Sports': 10, 'Technology': 7},
+            'max': {'Health': 3, 'Sports': 10, 'Technology': 7}
+        }
+        expected_df = pd.DataFrame(expected_data)
+        # Ensure the data types match, especially for integer columns
+        expected_df = expected_df.astype({
+            'min': 'int32',
+            'max': 'int32',
+            'count': 'int64',
+            'mean': 'float64'
+        })
+        expected_df.index.name = 'category'
+
+        pd.testing.assert_frame_equal(result_df, expected_df)
+
+    def test_article_timezone_conversion(self):
+        # Test data uses UTC as the base timezone; check the hours after conversion to London time
+        result = f_663(self.articles, 'Europe/London')
+        expected_hours = [8.0, 15.0, 12.0]
+        actual_hours = result.reset_index()['mean'].tolist()
+        self.assertEqual(expected_hours, actual_hours)
+
+    def test_different_timezones_across_categories(self):
+        # Create a set of articles across different categories and timezones
+        articles = [
+            {'title': 'Tech Trends', 'title_url': 'tech.com/trends', 'id': 1, 'category': 'Technology',
+             'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('UTC'))},
+            {'title': 'World Sports', 'title_url': 'sports.com/world', 'id': 2, 'category': 'Sports',
+             'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('Asia/Tokyo'))},
+            # tzinfo= attaches pytz's LMT offset for Tokyo (+09:19), not +09:00
+            {'title': 'Health News', 'title_url': 'health.com/news', 'id': 3, 'category': 'Health',
+             'published_time': datetime(2023, 1, 1, 12, 0, tzinfo=pytz.timezone('America/Los_Angeles'))}
+            # tzinfo= attaches pytz's LMT offset for Los Angeles (-07:53), not -08:00
+        ]
+        timezone = 'America/New_York'  # UTC-5 in January (EST)
+        result_df = f_663(articles, timezone)
+
+        expected_data = {
+            'count': {'Health': 1, 'Sports': 1, 'Technology': 1},
+            'mean': {'Health': 14.0, 'Sports': 21.0, 'Technology': 7.0},
+            # Converting 12:00 from respective timezones to New York time
+            'min': {'Health': 14, 'Sports': 21, 'Technology': 7},
+            'max': {'Health': 14, 'Sports': 21, 'Technology': 7}
+        }
+        expected_df = pd.DataFrame(expected_data)
+        expected_df.index.name = 'category'
+
+        expected_df = expected_df.astype({
+            'min': 'int32',
+            'max': 'int32',
+            'count': 'int64',
+            'mean': 'float64'
+        })
+
+        pd.testing.assert_frame_equal(result_df, expected_df)
+
+
+if __name__ == "__main__":
+    run_tests()
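A note on the Tokyo/Los Angeles fixtures in test_different_timezones_across_categories above: passing a pytz zone straight into datetime's tzinfo argument attaches the zone's first tabulated offset, its local mean time, which is why the expected hours are 21 and 14 rather than the 22 and 15 the nominal +09:00/-08:00 offsets would give. A small sketch of the difference, assuming pytz's documented behavior (the names via_tzinfo and via_localize are illustrative):

    import pytz
    from datetime import datetime

    tokyo = pytz.timezone('Asia/Tokyo')

    # tzinfo= uses the first entry in pytz's table for the zone: LMT, +09:19 for Tokyo.
    via_tzinfo = datetime(2023, 1, 1, 12, 0, tzinfo=tokyo)

    # localize() applies the offset actually in force at that instant: +09:00.
    via_localize = tokyo.localize(datetime(2023, 1, 1, 12, 0))

    print(via_tzinfo.utcoffset())    # 9:19:00
    print(via_localize.utcoffset())  # 9:00:00

The test's expected values deliberately match the tzinfo= construction, so they only hold as long as the fixtures keep that idiom.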
diff --git a/data/raw/f_674_data_simon/test_data_0.csv b/data/raw/f_674_data_simon/test_data_0.csv
deleted file mode 100644
index 5b5eb357..00000000
--- a/data/raw/f_674_data_simon/test_data_0.csv
+++ /dev/null
@@ -1,101 +0,0 @@
-Name,Age,Country,Gender
-Kelly Dean,24,Jordan,Male
-Joshua Molina,53,Nigeria,Female
-Danielle Richardson,23,Guyana,Male
-Luke Kim,68,Cuba,Male
-Eric Potter,77,South Africa,Female
-Sheila Conner,48,Congo,Female
-James Shields,32,Martinique,Male
-Kristy Pruitt,46,Uzbekistan,Male
-Lauren Ware,44,Peru,Female
-Gavin Molina,37,Belize,Male
-Jessica Johnson,43,Guatemala,Male
-Robert Stewart,70,Guadeloupe,Male
-Chelsea Khan,54,Bahrain,Male
-Summer Bean,51,Vietnam,Female
-Madison Padilla,23,Korea,Female
-Amy Sharp,58,Turks and Caicos Islands,Female
-Tyler Hall,76,Samoa,Female
-Wendy Jones,43,Italy,Male
-Deborah Zavala,31,Algeria,Male
-Allen White,40,Romania,Male
-Haley Gill,54,New Zealand,Male
-Henry Roman,25,Korea,Male
-Kristin Arias,51,Nicaragua,Male
-Lisa Stevens,65,Tokelau,Female
-Melissa Serrano,74,Philippines,Female
-Maria Lewis,42,Bahrain,Female
-Elizabeth Lopez,37,Ukraine,Female
-Terri Simmons,44,Myanmar,Male
-Monica Brown,55,Ecuador,Female
-Terry Stanton,42,Tajikistan,Male
-Tiffany Ward,44,Saint Lucia,Male
-Jesus Webb,45,Mongolia,Male
-Tina Thomas,39,Nigeria,Male
-Karen Thornton,63,Saint Kitts and Nevis,Male
-Michele Collins,56,Russian Federation,Male
-Brandon Thompson,23,Guernsey,Male
-Brittany Williams,53,Guinea-Bissau,Male
-Jennifer Collier,73,El Salvador,Female
-Ryan Perez,78,Korea,Male
-Rachel Sanchez,74,Saint Lucia,Male
-Madison Silva,27,Togo,Male
-Mr. Derrick Caldwell Jr.,66,Guyana,Male
-Carlos Williams,71,Saint Barthelemy,Female
-Jenna Williams,36,Cook Islands,Male
-Rachel Graham MD,71,Netherlands,Male
-Jane Porter,24,Liberia,Male
-Ms.
Olivia Kelly,31,Solomon Islands,Male -Melissa Phillips,73,Botswana,Male -Linda Jones,50,Latvia,Male -Anthony Boyle,76,Portugal,Male -Katie Gardner,80,Austria,Female -James Warren,78,Isle of Man,Female -Louis Espinoza,27,Portugal,Female -Catherine House,64,Afghanistan,Female -Christopher Morgan,39,American Samoa,Female -Shannon Edwards,80,Canada,Male -Alex Alvarado,66,Czech Republic,Male -Ariel Gomez,21,Burkina Faso,Female -Kelly Shannon,65,Italy,Male -Wendy Lewis,45,Brazil,Female -Joseph Dorsey,76,Botswana,Female -Sarah Craig,50,Latvia,Female -Gail Long,45,Sweden,Female -John Patel,50,Guernsey,Male -Jillian Knox,40,Venezuela,Male -Brandon Kim,30,Marshall Islands,Female -Michelle Griffin,54,Zambia,Male -Lisa Williams,22,El Salvador,Female -Maria Schaefer,38,Trinidad and Tobago,Male -Curtis Johnson,58,Anguilla,Female -Mary Bishop,41,Fiji,Male -Russell Allen,29,Moldova,Male -Alice George,54,Macedonia,Male -Timothy Reyes,68,Northern Mariana Islands,Female -Leslie Weaver,44,Solomon Islands,Female -Brian Snyder,79,Poland,Male -Karen Harrington,24,Martinique,Female -Julie Rojas,71,Saint Barthelemy,Male -Alicia Davis,38,Libyan Arab Jamahiriya,Female -Mark Johnson,26,Poland,Male -Jennifer Russo,52,Christmas Island,Female -Thomas Pitts,75,Bosnia and Herzegovina,Female -Daniel Garner,60,Bulgaria,Male -Betty Calhoun,49,American Samoa,Female -David Meyer,68,Vietnam,Male -Timothy Atkins,54,Jersey,Male -Andrea Hernandez,53,Israel,Female -Susan Walters,35,Eritrea,Male -Charles Knight,38,Mauritania,Female -Allen Collins,38,Philippines,Male -Adam King,38,Paraguay,Male -Kaitlyn Jones,66,Guinea-Bissau,Female -Donald Alvarado,27,San Marino,Male -Frank Matthews,47,Dominica,Male -Courtney Adams,46,Malaysia,Male -Suzanne Mckenzie,56,Estonia,Female -Nicholas Smith,37,Kenya,Male -Kristi Berry,39,Tonga,Male -Connor Christensen,24,Turkey,Female -Natasha Williamson,68,Tokelau,Male diff --git a/data/raw/f_674_data_simon/test_data_1.csv b/data/raw/f_674_data_simon/test_data_1.csv deleted file mode 100644 index e470b161..00000000 --- a/data/raw/f_674_data_simon/test_data_1.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Country,Gender -Steven Reed DVM,53,Comoros,Male -Wesley Good,72,Thailand,Female -Rebecca Bentley,78,Nicaragua,Female -Daniel Taylor,29,Western Sahara,Female -Madeline Fisher,46,Syrian Arab Republic,Male -Jeremy Davis,20,Jordan,Male -Samuel Torres,20,Sweden,Male -Sandra Lewis,66,Tuvalu,Female -Sean Nichols,75,Antigua and Barbuda,Female -Mark Ramirez,55,Kyrgyz Republic,Female -Jason Alexander,52,Bangladesh,Female -John Daugherty,41,Macao,Female -Kimberly Bowman,53,Solomon Islands,Female -Marcus Esparza,71,Lesotho,Female -Taylor Curry MD,24,Saint Barthelemy,Male -Sharon Livingston,34,Somalia,Male -Cheyenne Cunningham,62,Malawi,Female -Cristina Nelson,63,Turkey,Male -Cameron Brewer,69,Central African Republic,Male -Stephen Spencer,21,Guyana,Female -Shawn Wells,37,Malawi,Female -Amy Mccarthy,47,Saint Vincent and the Grenadines,Male -Matthew Smith,26,Bahamas,Male -Amber Walker,62,Guatemala,Male -Kristin Nunez,37,Tanzania,Female -Johnny Marshall,31,Nigeria,Male -Brian Mason,39,Tajikistan,Female -Abigail Cline,73,Cote d'Ivoire,Male -Kelly Jenkins,54,Luxembourg,Female -Kelsey Harris,23,Croatia,Female -Brandon Perry,63,Germany,Female -Philip Mitchell,43,Somalia,Female -Jon Jones,77,Romania,Female -Jessica Armstrong,51,Lao People's Democratic Republic,Female -Christian Martin Jr.,80,Jordan,Female -Jeffrey Coffey,73,Burkina Faso,Male -Amanda Davis,44,Mayotte,Male -Mary Braun,57,Eritrea,Female -Brandon Thomas,29,Russian 
Federation,Female -Erika Walker,75,Cape Verde,Female -Marc Berg,36,Samoa,Male -Calvin Howe,41,Swaziland,Male -Jesse Rodriguez,45,New Caledonia,Male -Paul Holt,26,Zambia,Female -Kathryn Jackson,60,Greece,Male -Leah Williamson,46,Isle of Man,Male -Julie Michael,33,Guadeloupe,Female -Roberto Hopkins,62,Kazakhstan,Female -Rebecca Roberson,30,Tuvalu,Female -Beverly Roberts,51,Mexico,Female -Peter Alexander,20,Latvia,Male -Gary Jackson,72,United States Minor Outlying Islands,Female -Vincent Reyes,20,India,Female -Daniel Turner,78,Iran,Female -Charles Valdez,47,Finland,Female -Jasmine Ryan,67,Lao People's Democratic Republic,Female -Tara Ramos,76,New Caledonia,Male -Joseph Rivers,37,Myanmar,Male -Amanda Walker,44,Brunei Darussalam,Male -Brandon Jordan,23,Costa Rica,Male -Jacqueline Davis,25,Sri Lanka,Male -Michelle Cohen,21,Argentina,Female -Willie Key,47,Lithuania,Female -Steven Santos,28,Svalbard & Jan Mayen Islands,Male -Veronica Dean,39,Guernsey,Female -Andrew Stafford,28,Niue,Female -Scott Santiago,74,British Indian Ocean Territory (Chagos Archipelago),Female -Anthony Kennedy,62,Israel,Male -Cassie Campbell,57,Tonga,Female -Veronica Bennett,59,Pitcairn Islands,Male -Aaron Joyce,80,Madagascar,Male -Brandi Solis,51,Jamaica,Female -Courtney Ellis,80,Lebanon,Male -Michael Krueger,65,Oman,Male -Amber White,77,Papua New Guinea,Male -Teresa Riley,60,Tanzania,Female -Jim Alvarez,50,Korea,Male -Cory Henry,63,Central African Republic,Male -John Brennan,55,Poland,Male -Maria Miles,51,Belize,Male -Melinda Rhodes,61,Bouvet Island (Bouvetoya),Female -Alexandra Mendoza,22,Portugal,Female -Mr. Cody Shelton DDS,23,Bhutan,Male -Ryan Black,51,Iceland,Female -Lisa Dean,68,Benin,Female -Hector Ross,58,Equatorial Guinea,Male -William Ellis,24,Jersey,Female -Jeffrey Hardy,33,Macedonia,Male -Emily Porter,70,Cayman Islands,Male -Jamie Myers,45,Papua New Guinea,Male -David Jackson,30,Jersey,Male -Hannah Martin,38,Tuvalu,Female -Cynthia Wilson,44,Canada,Male -Peggy Bruce,25,Mozambique,Female -Robert Jones,46,Finland,Male -Joseph Jenkins,20,Paraguay,Female -Donna Brown,26,Puerto Rico,Male -Michael Allen,30,Lebanon,Male -Susan Paul,50,Kyrgyz Republic,Female -Donna Carr,71,Gibraltar,Female diff --git a/data/raw/f_674_data_simon/test_data_1column.csv b/data/raw/f_674_data_simon/test_data_1column.csv deleted file mode 100644 index d9bb6e28..00000000 --- a/data/raw/f_674_data_simon/test_data_1column.csv +++ /dev/null @@ -1,2 +0,0 @@ -Name,Age,Genre,Height -Christopher Martinez,21,Rock,180 \ No newline at end of file diff --git a/data/raw/f_674_data_simon/test_data_2.csv b/data/raw/f_674_data_simon/test_data_2.csv deleted file mode 100644 index 6f1c3c2d..00000000 --- a/data/raw/f_674_data_simon/test_data_2.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Country,Gender -Karen Daniels,68,Cape Verde,Male -Mario Villarreal,74,Bahrain,Male -Christopher Moon,38,Tajikistan,Male -Derrick Jensen,26,Korea,Female -Kathryn Mclean,52,San Marino,Male -Carlos Branch,26,Suriname,Male -Patrick Fisher,63,Martinique,Male -Lindsay Perez,62,Haiti,Male -Todd Cox,60,Zimbabwe,Male -Blake Smith,52,Western Sahara,Female -Sherry Mcmahon,58,Mozambique,Female -Danny Ho,57,Micronesia,Male -Kimberly Mcmahon,77,Iceland,Female -Daniel Murillo,64,Cuba,Female -Eric Mack,78,Lesotho,Female -Albert Smith,30,Saint Kitts and Nevis,Male -Kelli Hughes,45,Kenya,Female -Paul Perkins,21,Hungary,Female -Anna Ford,77,Denmark,Female -Cynthia Doyle MD,34,Mali,Male -Christina Nguyen,42,Antigua and Barbuda,Female -Shane Aguirre,45,Samoa,Female -Daniel 
Barnes,53,Azerbaijan,Female -Melanie Shaw,38,Egypt,Female -Kristin Perry,40,Belgium,Female -Amanda Young,22,Saudi Arabia,Male -Rhonda Sanchez,65,Argentina,Male -Linda Rogers,56,Latvia,Female -George Bean,46,Slovakia (Slovak Republic),Male -Kelsey Zuniga,55,Belarus,Male -Christopher Ware,38,Germany,Male -Sarah Phelps,47,Macao,Female -Rachael Ryan,54,Liechtenstein,Male -Joshua Horn,35,Lesotho,Female -Albert Alexander,69,Congo,Female -Jennifer Bender,80,Fiji,Male -Jared Villegas,38,Central African Republic,Female -Richard Crawford,42,India,Female -Mrs. Nicole Hernandez,78,Greenland,Male -Thomas Owens,48,Papua New Guinea,Male -Jessica Cisneros,45,Albania,Female -Steven Soto,80,Sweden,Female -Jennifer Davis,44,Comoros,Female -Terry Reese,34,Solomon Islands,Female -Victor Turner,49,Mayotte,Female -Stephanie Martin,23,Czech Republic,Male -Donna Davis,39,Bulgaria,Female -Heather Beard,53,Mayotte,Male -Steven Smith,49,Germany,Male -James Romero,34,Afghanistan,Female -Tracy Mercado,22,Saint Vincent and the Grenadines,Female -Steven Woods,49,Kenya,Male -Kimberly Henderson,27,Mali,Male -Thomas Hernandez MD,59,Egypt,Female -Kimberly Shaw,37,Micronesia,Female -Travis Valdez,36,Pitcairn Islands,Male -Valerie Morgan,75,Kyrgyz Republic,Female -Megan Dunn,25,Haiti,Male -Laurie Richardson,38,Pakistan,Female -Wesley Davis,45,Falkland Islands (Malvinas),Female -Nathaniel Wolf,76,Albania,Male -John Hahn,27,Heard Island and McDonald Islands,Male -Connie West,53,Romania,Female -Juan Chavez,74,Niger,Male -Lynn Thompson,60,Macao,Male -Brian Cooper,35,Cook Islands,Female -Alexis Baxter,27,Malaysia,Male -Jessica Costa,69,Switzerland,Male -Mary Valenzuela,66,Finland,Female -Aaron Velasquez,43,Samoa,Female -Christopher Gallagher,23,Ukraine,Female -Katherine Fuentes,57,Turks and Caicos Islands,Male -Courtney Smith,62,Hong Kong,Female -Samantha Bender,54,Uzbekistan,Female -Eric Mckay,70,Northern Mariana Islands,Male -Laura Kim,58,Antigua and Barbuda,Female -Lisa Weber,47,Comoros,Male -Paula Stevenson,58,Moldova,Female -Bob George,47,Saint Kitts and Nevis,Female -Justin Deleon,58,Poland,Male -Brenda Guzman,80,Argentina,Female -Bailey Thomas,53,Djibouti,Female -Barbara Nichols,30,Costa Rica,Male -John Johnson,24,Angola,Male -David Banks,34,Dominica,Male -Derek Mills,27,Luxembourg,Male -Kimberly Rodriguez,70,Liberia,Male -Daniel Hayes,75,American Samoa,Female -Monica King,57,Egypt,Male -Christina Levine,29,Niger,Male -Robin Goodman,64,Rwanda,Male -Megan Cortez,36,France,Male -Courtney Lynch DDS,75,Dominica,Male -Jasmine Foster,21,Korea,Female -Dr. 
Tom Wilson,76,Ethiopia,Female -Pedro Perez,53,Croatia,Female -Rhonda Boyer,76,Norfolk Island,Female -Susan Horton,23,Sri Lanka,Male -Timothy Bray,69,Niger,Female -Alice Blevins,56,Burkina Faso,Male diff --git a/data/raw/f_674_data_simon/test_data_3.csv b/data/raw/f_674_data_simon/test_data_3.csv deleted file mode 100644 index bd4a7724..00000000 --- a/data/raw/f_674_data_simon/test_data_3.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Country,Gender -Luke Schroeder,42,Vanuatu,Female -Darrell Johnson,41,Togo,Female -Tanya Frye,34,Poland,Male -Lisa Klein,28,Cyprus,Male -Paul Fowler,44,Gabon,Female -William Kelley,23,Jamaica,Male -Ryan Garcia,55,Western Sahara,Female -Randall Thomas,53,Fiji,Male -Keith Wu Jr.,54,Mongolia,Female -Debbie Perez,50,Ecuador,Male -Patrick Clark,37,Armenia,Female -Kelly Moore,46,Aruba,Female -Ryan Jimenez,42,Bhutan,Male -Diane Wells,36,Sri Lanka,Male -Diane Collins,65,New Caledonia,Female -Anthony Reid,55,Lao People's Democratic Republic,Female -Vanessa Goodman,68,France,Male -Norma May,47,Samoa,Male -Raymond Drake,63,Mayotte,Male -Daisy Johnson,77,Antigua and Barbuda,Female -James Hubbard,30,Belarus,Male -Jonathan Nunez,39,Palestinian Territory,Male -Terry Weaver,72,Niger,Female -Seth Gibbs,32,Bulgaria,Female -Jennifer Hickman,21,Brunei Darussalam,Male -Sarah Young,40,Haiti,Female -Thomas Torres,56,Spain,Male -Brian Sanchez,42,Equatorial Guinea,Female -Marcus Lowe,67,Saint Lucia,Female -Christopher Klein,31,New Caledonia,Male -James Lewis,62,Tokelau,Male -Megan Robinson,62,Timor-Leste,Female -Amy Woods,38,Bulgaria,Female -Kenneth Harris,78,Burundi,Male -Sarah Williamson,69,Swaziland,Female -Terry Frazier,49,Congo,Male -Alex Wright,35,Micronesia,Female -Ricky Brown,25,Cameroon,Female -Glenn Mullen,54,Libyan Arab Jamahiriya,Female -Robert Weiss,24,Lithuania,Male -Garrett Davenport,25,Australia,Male -Pamela Vasquez,38,Albania,Male -Joshua Green,70,Faroe Islands,Female -April Byrd,65,Antigua and Barbuda,Male -Sophia Turner,54,Western Sahara,Female -Rachel Johnson,44,Zambia,Female -Audrey Wagner,69,Sudan,Female -Ashley Jones,43,Palestinian Territory,Male -Connor Bridges,20,Bangladesh,Female -Matthew Rodgers,76,Niger,Male -Justin Kelly,58,Palau,Female -Jamie Miller,22,South Africa,Female -Molly Ryan,31,Gambia,Male -Christina Ortiz,50,Saint Pierre and Miquelon,Female -Rhonda Gregory,32,Philippines,Female -Megan Hill,34,Madagascar,Female -Scott Garza,40,Gabon,Female -Janice Mason DDS,72,Tajikistan,Female -Melissa Perez,39,South Africa,Male -Ivan Smith,38,Turkey,Male -John Lee,66,Vietnam,Male -Anne Davis,79,Palestinian Territory,Female -Alexander Allen,38,Russian Federation,Female -Mario Nelson,67,India,Male -Manuel Carlson,76,Morocco,Male -Brianna Allen,77,Grenada,Male -Brian Guerra,72,Bahamas,Male -Cindy Baker,74,Tajikistan,Female -Brandon Walker,26,Central African Republic,Female -Lisa Moore,31,San Marino,Female -Benjamin Peterson,62,Bermuda,Male -Michael Golden,54,Liechtenstein,Male -Brent Barnes,62,Cape Verde,Male -David Jones,78,Vietnam,Male -Michael Leach,35,Greece,Male -Mark Malone,21,Saudi Arabia,Female -Dr. 
Daniel Williams,45,Seychelles,Female -John Clements,80,Nepal,Female -Brandy Arnold,55,New Caledonia,Male -John Dean,25,Colombia,Female -Dennis Gomez,43,Nepal,Female -Stephanie Snow,22,Nauru,Male -Michael Heath,74,Egypt,Female -Dustin Pham,69,Kuwait,Female -Alyssa Gardner,80,Equatorial Guinea,Female -Colton Gray,73,Gabon,Male -Mary Reyes,69,Barbados,Male -Elizabeth Wagner,67,Faroe Islands,Female -Jessica Hood,60,Suriname,Male -Erin Jackson,57,France,Male -John Cook,43,Martinique,Male -Ricky Rivera,33,Uruguay,Female -Debra Jones,62,Lebanon,Male -Cory Morris,23,Ecuador,Male -Justin Flores,71,Hong Kong,Male -Lisa French,23,Macao,Male -Caitlyn Juarez,75,Ecuador,Male -David Mason,79,El Salvador,Male -Dr. Stephanie Benton,62,Saint Lucia,Male -Michael Davenport,23,Iceland,Male diff --git a/data/raw/f_674_data_simon/test_data_4.csv b/data/raw/f_674_data_simon/test_data_4.csv deleted file mode 100644 index c52b1726..00000000 --- a/data/raw/f_674_data_simon/test_data_4.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Country,Gender -Christopher Martinez,21,Antigua and Barbuda,Female -Kelsey Miller,79,Macao,Male -Matthew Barrett,20,Eritrea,Female -Mary Ferrell,30,Saint Vincent and the Grenadines,Male -Andrew Cross,67,Gambia,Female -Brian Matthews,45,Sudan,Female -Ashley Robbins,62,Uganda,Male -Cory Russell,61,Togo,Female -Andrea Moss,25,Zambia,Female -Stacy Moore,38,Cyprus,Male -Dawn Rice,63,China,Male -Michael Dominguez,58,Dominican Republic,Male -Jordan Brown,36,Turkey,Female -Yvonne Stephens,56,Bulgaria,Male -Glenn Evans,65,French Polynesia,Male -Mark Dorsey,50,Tunisia,Male -Daniel Davis,62,Slovakia (Slovak Republic),Female -Katherine Ayala,66,Luxembourg,Male -Emily Charles,52,Korea,Male -Jill Martinez,27,Italy,Male -Lisa Reed,22,Nicaragua,Female -Sarah Edwards,64,Bhutan,Male -Amy Patterson,37,Albania,Female -Michael Poole,47,Sri Lanka,Male -Jerry Hall,22,Niue,Female -Michael Rogers,59,Malta,Female -Michelle Evans,77,Singapore,Male -Teresa Adams,78,United Kingdom,Female -Donald Frazier,26,Ecuador,Male -Misty Lambert,47,Heard Island and McDonald Islands,Male -Kyle Holder,29,Saint Barthelemy,Male -Brandon Whitehead,23,South Africa,Male -Christina Collins,57,Trinidad and Tobago,Male -Philip Barajas,73,Vanuatu,Female -Vanessa Holloway,44,United States of America,Male -Brenda Holmes,75,Eritrea,Female -Nicholas Potter,78,Cambodia,Female -Carol Wilson,31,Nigeria,Female -Amanda Henry,42,Armenia,Male -Alexis Mccoy,25,Reunion,Male -Albert Gordon,59,Faroe Islands,Female -Desiree Graves,34,French Southern Territories,Male -Mr. Austin Jackson,43,Canada,Male -John Ellis,47,Reunion,Female -Michael Mathews,41,United States Minor Outlying Islands,Female -Sierra Daniels,75,Gibraltar,Female -Miguel Moran,26,Comoros,Male -Michael Mcdaniel,23,Mozambique,Male -Roy Vargas,33,Trinidad and Tobago,Male -Mark Richard,21,Saint Lucia,Female -Holly Wilson,31,Kuwait,Male -Olivia Hall,56,Isle of Man,Female -Rachel Taylor,53,Russian Federation,Female -Edgar Marshall,53,Italy,Female -Lawrence Webb,25,Iraq,Female -David Cervantes,36,Liberia,Female -Erik Gonzalez,44,Cocos (Keeling) Islands,Female -Shannon Terry,51,Maldives,Female -Kimberly Butler,58,Albania,Male -Nichole Castillo,45,Comoros,Female -Phillip Holmes,59,Bangladesh,Female -Rebecca Boyd,76,Comoros,Female -John Johnston,55,Macedonia,Female -Jamie Cook,74,Guinea,Female -Janice Rivers,74,Christmas Island,Male -Mrs. Tiffany Garcia,45,Reunion,Female -Travis Alexander,76,Jersey,Female -Mrs. 
Bonnie Hernandez,73,Botswana,Male -Robert Rogers,31,United States Virgin Islands,Female -Beth Short,21,Guinea-Bissau,Male -Stephanie Campbell,62,Ghana,Female -Juan Olsen,26,Bolivia,Male -David Mcguire,64,Swaziland,Female -Rebecca Moore,26,Bosnia and Herzegovina,Female -Donna Flores,20,Lao People's Democratic Republic,Female -Meagan Sandoval,23,India,Male -Courtney Morgan,66,Jordan,Female -Peter Brown,60,Cote d'Ivoire,Male -Brian Meyer,43,Saudi Arabia,Female -Dylan Perez,61,Guadeloupe,Female -John Shaffer,38,Aruba,Female -Melissa Gomez,55,Rwanda,Female -Laura Dalton,26,British Indian Ocean Territory (Chagos Archipelago),Female -Gabriel Johnson,80,Saint Kitts and Nevis,Male -Jillian Sanders,50,Zambia,Female -Barbara Burns,20,Nicaragua,Female -Rebecca Bradshaw,75,Mauritania,Female -Dominique Daugherty,75,Greece,Male -Kevin Robbins,34,Italy,Female -Julie Bartlett,23,Comoros,Female -Jessica Contreras,53,Guernsey,Male -Joel Hart,66,Sao Tome and Principe,Female -David Ross,25,Chile,Male -David Powell,72,Jersey,Female -Brett Jones,64,Czech Republic,Male -Timothy Harrington,21,Qatar,Male -Paul Daugherty,26,Greece,Female -Keith Kennedy,38,Guinea-Bissau,Female -James Moore,77,Grenada,Female -Jasmine Austin,50,Namibia,Female diff --git a/data/raw/f_674_data_simon/test_data_empty.csv b/data/raw/f_674_data_simon/test_data_empty.csv deleted file mode 100644 index 9eef21cf..00000000 --- a/data/raw/f_674_data_simon/test_data_empty.csv +++ /dev/null @@ -1 +0,0 @@ -Name,Age,Country,Gender \ No newline at end of file diff --git a/data/raw/f_674_simon.py b/data/raw/f_674_simon.py deleted file mode 100644 index 3bb3e50e..00000000 --- a/data/raw/f_674_simon.py +++ /dev/null @@ -1,144 +0,0 @@ -import collections -import numpy as np - - -def f_674(file_name): - """ - Find the most common value in each column of a csv file with column names. - - If some values occur the same number of times, the values are sorted - alphabetically and the first is considered most common. - - If an empty csv is passed, an empty dictionary is returned. - - Parameters: - file_name (str): The name of the csv file. - - Returns: - dict: A dictionary with column names as keys and most common values as values. 
-
-    Requirements:
-    - collections
-    - numpy
-
-    Example:
-    >>> common_values = f_674('sample.csv')
-    >>> print(common_values)
-    {'Name': 'Simon Velasquez',
-    'Age': 21,
-    'Fruit': 'Apple',
-    'Genre': 'HipHop',
-    'Height': 172}
-
-    >>> common_values = f_674('test.csv')
-    >>> print(common_values)
-    {'Object': 'Chair',
-    'Weight': '211kg',
-    'Dancing Style': 'Waltz',}
-    """
-    data = np.genfromtxt(file_name, delimiter=',', names=True,
-                         dtype=None, encoding=None)
-    common_values = {}
-
-    if len(np.atleast_1d(data)) == 0:
-        return {}
-
-    if len(np.atleast_1d(data)) == 1:
-        for col in data.dtype.names:
-            common_values[col] = data[col].item()
-
-    else:
-        for col in data.dtype.names:
-            counter = collections.Counter(data[col])
-            if counter.most_common(2)[0][1] == counter.most_common(2)[1][1]:
-                common_values[col] = sorted(counter.items())[0][0]
-            else:
-                common_values[col] = counter.most_common(1)[0][0]
-
-    return common_values
-import unittest
-import os
-
-
-def run_tests():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.makeSuite(TestCases))
-    runner = unittest.TextTestRunner()
-    runner.run(suite)
-
-
-class TestCases(unittest.TestCase):
-
-    def test_empty(self):
-        result = f_674(os.path.join('f_674_data_simon', 'test_data_empty.csv'))
-        self.assertEqual(result, {})
-
-    def test_1_entry(self):
-        result = f_674(os.path.join('f_674_data_simon', 'test_data_1column.csv'))
-        self.assertIsInstance(result, dict)
-        self.assertEqual(len(result), 4)
-
-        self.assertCountEqual(result.keys(), ['Name', 'Age', 'Genre', 'Height'])
-        self.assertEqual(result['Name'], 'Christopher Martinez')
-        self.assertEqual(result['Age'], 21)
-        self.assertEqual(result['Genre'], 'Rock')
-        self.assertEqual(result['Height'], 180)
-
-    def test_case_1(self):
-        result = f_674(os.path.join('f_674_data_simon', 'test_data_0.csv'))
-        self.assertIsInstance(result, dict)
-        self.assertEqual(len(result), 4)
-
-        self.assertCountEqual(result.keys(), ['Name', 'Age', 'Country', 'Gender'])
-        self.assertEqual(result['Name'], 'Adam King')
-        self.assertEqual(result['Age'], 21)
-        self.assertEqual(result['Country'], 'Korea')
-        self.assertEqual(result['Gender'], 'Male')
-
-    def test_case_2(self):
-        result = f_674(os.path.join('f_674_data_simon', 'test_data_1.csv'))
-        self.assertIsInstance(result, dict)
-        self.assertEqual(len(result), 4)
-
-        self.assertCountEqual(result.keys(), ['Name', 'Age', 'Country', 'Gender'])
-        self.assertEqual(result['Name'], 'Aaron Joyce')
-        self.assertEqual(result['Age'], 20)
-        self.assertEqual(result['Country'], 'Tuvalu')
-        self.assertEqual(result['Gender'], 'Female')
-
-    def test_case_3(self):
-        result = f_674(os.path.join('f_674_data_simon', 'test_data_2.csv'))
-        self.assertIsInstance(result, dict)
-        self.assertEqual(len(result), 4)
-
-        self.assertCountEqual(result.keys(), ['Name', 'Age', 'Country', 'Gender'])
-        self.assertEqual(result['Name'], 'Aaron Velasquez')
-        self.assertEqual(result['Age'], 21)
-        self.assertEqual(result['Country'], 'Afghanistan')
-        self.assertEqual(result['Gender'], 'Female')
-
-    def test_case_4(self):
-        result = f_674(os.path.join('f_674_data_simon', 'test_data_3.csv'))
-        self.assertIsInstance(result, dict)
-        self.assertEqual(len(result), 4)
-
-        self.assertCountEqual(result.keys(), ['Name', 'Age', 'Country', 'Gender'])
-        self.assertEqual(result['Name'], 'Alex Wright')
-        self.assertEqual(result['Age'], 62)
-        self.assertEqual(result['Country'], 'Albania')
-        self.assertEqual(result['Gender'], 'Male')
-
-    def test_case_5(self):
-        result = f_674(os.path.join('f_674_data_simon', 'test_data_4.csv'))
-        self.assertIsInstance(result, dict)
-        self.assertEqual(len(result), 4)
-
-        self.assertCountEqual(result.keys(), ['Name', 'Age', 'Country', 'Gender'])
-        self.assertEqual(result['Name'], 'Albert Gordon')
-        self.assertEqual(result['Age'], 26)
-        self.assertEqual(result['Country'], 'Comoros')
-        self.assertEqual(result['Gender'], 'Female')
-
-if __name__ == "__main__":
-    run_tests()
\ No newline at end of file
diff --git a/data/raw/f_674_simon_chien_edit.py b/data/raw/f_674_simon_chien_edit.py
new file mode 100644
index 00000000..808e5cd8
--- /dev/null
+++ b/data/raw/f_674_simon_chien_edit.py
@@ -0,0 +1,138 @@
+import collections
+import numpy as np
+
+
+def f_674(file_name):
+    """
+    Find the most common value in each column of a csv file with column names.
+
+    If some values occur the same number of times, the values are sorted
+    alphabetically and the first is considered most common.
+
+    If an empty csv is passed, an empty dictionary is returned.
+
+    Parameters:
+    file_name (str): The name of the csv file.
+
+    Returns:
+    dict: A dictionary with column names as keys and most common values as values.
+
+    Requirements:
+    - collections
+    - numpy
+
+    Example:
+    >>> common_values = f_674('sample.csv')
+    >>> print(common_values)
+    {'Name': 'Simon Velasquez',
+    'Age': 21,
+    'Fruit': 'Apple',
+    'Genre': 'HipHop',
+    'Height': 172}
+    """
+    data = np.genfromtxt(file_name, delimiter=',', names=True,
+                         dtype=None, encoding=None)
+    common_values = {}
+
+    if len(np.atleast_1d(data)) == 0:
+        return {}
+
+    if len(np.atleast_1d(data)) == 1:
+        for col in data.dtype.names:
+            common_values[col] = data[col].item()
+
+    else:
+        for col in data.dtype.names:
+            counter = collections.Counter(data[col])
+            # A column with a single unique value yields only one pair from
+            # most_common(2); guard the tie check to avoid an IndexError.
+            most_common = counter.most_common(2)
+            if len(most_common) > 1 and most_common[0][1] == most_common[1][1]:
+                common_values[col] = sorted(counter.items())[0][0]
+            else:
+                common_values[col] = most_common[0][0]
+
+    return common_values
+
+
+import unittest
+import os
+import shutil
+import tempfile
+import csv
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+class TestCases(unittest.TestCase):
+
+    def setUp(self):
+        # Create a temporary directory to house the CSV files
+        self.test_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        # Remove the temporary directory after the test
+        shutil.rmtree(self.test_dir)
+
+    def create_csv(self, file_name, headers, data):
+        # Helper function to create a CSV file
+        path = os.path.join(self.test_dir, file_name)
+        with open(path, 'w', newline='') as csvfile:
+            writer = csv.DictWriter(csvfile, fieldnames=headers)
+            writer.writeheader()
+            for row in data:
+                writer.writerow(row)
+        return path
+
+    def test_empty_csv(self):
+        # Test for an empty CSV file
+        file_path = self.create_csv('empty.csv', ['Name', 'Age'], [])
+        result = f_674(file_path)
+        self.assertEqual(result, {})
+
+    def test_single_entry(self):
+        # Test for a CSV file with a single entry
+        file_path = self.create_csv('single.csv', ['Name', 'Age'], [{'Name': 'John', 'Age': '30'}])
+        result = f_674(file_path)
+        self.assertEqual(result, {'Name': 'John', 'Age': 30})
+
+    def test_common_values_sorted(self):
+        # Test for common values, ensuring alphabetical sorting
+        file_path = self.create_csv('common_values.csv', ['Fruit'], [{'Fruit': 'Apple'}, {'Fruit': 'Banana'}, {'Fruit': 'Apple'}, {'Fruit': 'Banana'}, {'Fruit': 'Cherry'}])
+        result = f_674(file_path)
+        self.assertEqual(result, {'Fruit': 'Apple'})
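+
+    # NOTE: illustrative sketch added in review, not part of the original
+    # suite; the column name and values here are hypothetical. It exercises
+    # the most_common guard in f_674 above for a column whose rows all
+    # share a single value, which previously raised IndexError.
+    def test_single_valued_column(self):
+        data = [{'City': 'Oslo'}, {'City': 'Oslo'}, {'City': 'Oslo'}]
+        file_path = self.create_csv('single_valued.csv', ['City'], data)
+        result = f_674(file_path)
+        self.assertEqual(result, {'City': 'Oslo'})
+
+    def 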
test_multiple_columns(self): + # Test for multiple columns and entries + data = [{'Name': 'Alice', 'Age': '25', 'Country': 'USA'}, + {'Name': 'Bob', 'Age': '30', 'Country': 'USA'}, + {'Name': 'Alice', 'Age': '25', 'Country': 'Canada'}] + file_path = self.create_csv('multi_columns.csv', ['Name', 'Age', 'Country'], data) + result = f_674(file_path) + expected = {'Name': 'Alice', 'Age': 25, 'Country': 'USA'} + self.assertEqual(result, expected) + + def test_tie_breaking(self): + # Test for tie-breaking in value counts + data = [{'Name': 'Alice'}, {'Name': 'Bob'}, {'Name': 'Alice'}, {'Name': 'Bob'}] + file_path = self.create_csv('tie.csv', ['Name'], data) + result = f_674(file_path) + self.assertEqual(result, {'Name': 'Alice'}) + + +if __name__ == "__main__": + run_tests() diff --git a/data/raw/f_675_data_simon/test_data_0.csv b/data/raw/f_675_data_simon/test_data_0.csv deleted file mode 100644 index eaa23834..00000000 --- a/data/raw/f_675_data_simon/test_data_0.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Salary -Olivia Anderson,20,77811 -Mr. Joshua Casey,16,47695 -Anthony Floyd,95,68274 -Brian Gallegos,64,42001 -Spencer Lawrence,40,49725 -Tracey Sandoval,74,84123 -Michael Stout,49,53303 -Julian Matthews,94,82287 -Dustin Ortiz,70,35604 -Ashley Murray,66,77474 -Austin Goodman,63,63090 -Erica Brown,52,59331 -Alexandria Smith,13,82214 -Roberta Ruiz,66,52643 -Charles Anderson,27,78711 -Ashley Wang,57,53031 -Kelly Padilla,27,51819 -Joseph Meyer,53,74036 -Charles Brown,16,30496 -Edward Conley,71,84244 -Tyler Jacobs,60,47357 -Allen Edwards,15,41330 -Cynthia Flores,73,74144 -Ryan Alvarado,86,51195 -Cheryl Short,40,38499 -John Sanford,28,82574 -Christopher Kidd,53,76502 -Sherri Ali,97,75188 -Roberta Jones,96,38142 -Todd Norman,29,72216 -Stephanie Gutierrez,69,44665 -Faith Roy,55,33814 -Victor Neal,82,46426 -Janet Williams,64,84327 -Shane Whitehead,76,46136 -Richard Russell Jr.,45,36020 -Christopher White,82,45140 -Christopher Richards,14,57405 -Barbara Butler,47,83323 -Brianna Lopez,63,86113 -Kim Malone,43,40750 -Mandy Bird,17,48210 -Susan Robertson,81,72068 -Roger Smith,62,74964 -Thomas Chandler,66,38686 -Timothy Parker,38,77435 -Chad Mann,22,82898 -Shannon Miller,70,65295 -Megan Castillo,67,74097 -Eric Watts,47,44721 -Eric Donaldson,65,40501 -Theodore White,29,67435 -Rachel Barr,19,57347 -Kathleen Mays,68,49490 -Victor Turner,54,48930 -Jeffrey Myers,64,42700 -Michelle Norris,37,67541 -Christopher Hayes,90,87350 -Eric Miller,95,40148 -Heather Lopez,76,39924 -Emily Bray,36,42208 -Trevor Rogers,65,73922 -Valerie Campbell,42,62247 -Danielle Bond,72,88036 -Glenda Macias,50,86040 -Tonya Paul,90,33269 -Derrick Sandoval,64,44131 -Pamela Morales,26,50471 -Barbara Elliott,67,79237 -Andrew Schroeder,84,63695 -Eileen Anderson,89,39745 -Virginia Mckee,71,71386 -Jenna Johnson,61,76556 -Adam Day,91,39764 -Cheryl Perez,80,50706 -Matthew Ortiz,89,37095 -Jamie Sims,96,86553 -Megan Cole,72,65817 -Natasha Stanley,96,50327 -Joseph Chambers,44,78654 -Jordan Patton,92,75156 -Jennifer Martinez,92,67642 -Mr. 
Timothy Watson,95,69507 -Gabriel Bender,44,85075 -Michael Wheeler,11,37010 -Brian Landry,75,46393 -Theresa Simon,59,37355 -Mark Hansen,42,82449 -James Turner,43,68627 -Morgan Lewis,27,35766 -Anthony Young,64,36649 -James Rivera,47,81295 -William Jones,19,54233 -Mckenzie White,17,40608 -Patrick Wiggins,82,65090 -Sarah Nelson,76,82995 -Gregory Contreras,77,81534 -Lawrence Rowland,67,73174 -Gabriella Banks,21,69861 -Logan Brown,84,73054 diff --git a/data/raw/f_675_data_simon/test_data_1.csv b/data/raw/f_675_data_simon/test_data_1.csv deleted file mode 100644 index 70590bfa..00000000 --- a/data/raw/f_675_data_simon/test_data_1.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Salary -Logan Martinez,29,47294 -Travis Copeland,71,88216 -Ashley Jennings,84,58530 -Mark Horton,16,32767 -Zachary Hernandez,56,53537 -James Peters,23,76624 -Chelsea Rodriguez,80,33804 -Angela Grimes,49,81983 -Rebecca Herrera,44,65211 -Dawn Flynn,90,89991 -Jennifer Thompson,99,43340 -Robert Williams,65,43417 -Michael Morrow,90,87195 -Ian Short,11,66032 -Ms. Victoria Vaughn,14,47212 -Steven Joseph,81,46286 -Amber Mcintosh,81,32481 -Erin Robinson,37,58362 -Donald Blackwell,84,35945 -Connor Martinez,81,85790 -Heather Rodriguez,88,85727 -David Mathews,14,79885 -Alexis Taylor,78,67842 -Heather Rodriguez,66,87321 -Penny Clements,80,34104 -Michael Shaw,57,79926 -Brandi Lopez,46,88113 -Caitlin Phelps,80,36429 -Renee Johnson,98,78156 -Joseph Burgess,98,43916 -Jason Mitchell,16,80930 -Brian Sawyer,23,61655 -Jordan Jones,39,79174 -Scott Gross,77,48599 -Holly Johnson,48,89260 -Alicia Johnson,26,54740 -Kenneth Ayala,12,53947 -David Brown,21,56520 -John Wagner,20,45794 -Melissa Wood,12,72104 -Joel Larson,23,75025 -Regina Aguirre,88,68231 -Rose Dean,70,48549 -Robert Munoz,92,76471 -Tony Herman,64,56549 -Laura Williamson,96,58065 -Rachel Ramirez,75,38642 -Anthony Mcmillan,34,55942 -Kelly Tucker,96,32868 -Jonathan Myers,49,56529 -Carrie Le,16,85557 -Diane Park,30,37579 -Melissa Rodriguez,54,33342 -Justin Frost,52,58149 -Margaret May,31,76993 -Laura Williams,51,70715 -Jeffery Mills,75,46052 -Joshua Roberts,39,73501 -Christina Valencia,42,71316 -John Underwood,36,81804 -Rebecca Marshall,71,37090 -Ashley Hoffman,90,58460 -Kelsey Dorsey DDS,88,32089 -Jacqueline Flores,66,66157 -Gabriela Cruz,98,86453 -Benjamin Ryan,39,89175 -Mark Peterson,41,44161 -Carrie Barnett,48,30087 -Crystal Wright,43,45603 -Stephanie Hull,66,56601 -Jonathan Tyler,89,74128 -Randy Tate,26,32160 -Jennifer Bell,68,80486 -Mr. Bryan Bryant,12,71627 -Gina Jordan,64,37706 -Kenneth Garcia,78,50856 -Steven Flynn,53,78057 -Rachael Mendoza,93,62064 -Raymond Morrow,40,47132 -Megan White,15,52899 -Nicholas Ruiz,55,48756 -Matthew Kelly,73,85021 -Curtis Martin,97,52174 -Anna Herman,84,84679 -Brian Ford,82,89099 -Michael Sutton,18,80900 -Stacey Ruiz,47,75380 -Amy Sanchez,66,52993 -Kyle Fox,32,39727 -Mr. 
Michael Harrington,89,71299 -David Stark,13,78843 -Brian Nichols,42,70341 -Paul Ferguson,83,87237 -April Williams,48,38810 -Jack Jarvis,96,80187 -Charles Lewis,44,42090 -James Martinez,12,67462 -Victoria Martinez,31,68152 -Dawn Robinson,68,74727 -Susan Richardson,90,54267 diff --git a/data/raw/f_675_data_simon/test_data_2.csv b/data/raw/f_675_data_simon/test_data_2.csv deleted file mode 100644 index ed732c39..00000000 --- a/data/raw/f_675_data_simon/test_data_2.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Salary -Jessica Valdez,13,69017 -Justin Hall,70,79375 -James Williams,87,75997 -Monica Hunter,47,55498 -Christopher Martin,36,82778 -Christine Smith,61,33726 -Emily Rasmussen,32,88507 -William Melton,10,37582 -Jackie Roberson,11,53084 -Howard Taylor,23,51833 -Nicole Dunn,69,60853 -Anna Franklin,20,80772 -Jeffrey Davis,65,57291 -Donald Stone,64,66542 -Emily Huang,46,32051 -Paula Sexton,15,78845 -Lauren Carter,92,52917 -Wendy Sullivan,79,60970 -Mrs. Nancy Larson,12,36245 -Kimberly Gaines,13,75548 -Michael Chandler,69,40208 -Jordan Hall,65,62392 -Jeffrey Franklin,59,72504 -Brian Adams,38,60868 -Michael Holloway,84,79807 -Tami Rodriguez,71,59347 -Dr. Adam Owen,54,79108 -Kevin Bradley,28,45622 -Connor White,23,42532 -Timothy Carney,87,89705 -Sandra Murphy,81,60575 -Rhonda Castillo,25,48061 -Gabriel Mccall,37,76039 -Monica Peterson,24,69588 -Alicia Hurley,33,78897 -Rebecca Vincent PhD,50,58119 -David Patrick,23,82129 -Tanya Cuevas,78,62819 -Christopher Davis,79,66284 -Robert Nichols,38,47244 -Adam Ross,53,71987 -Lauren Beard,97,84256 -Richard Moore,17,52184 -Jordan Mitchell,33,38915 -Abigail Mitchell,99,89978 -Joshua Bailey,80,53670 -William Mathis,47,47797 -Courtney Padilla,29,34149 -Joshua Gardner,64,67026 -Kevin Pena,54,83476 -Alexander Summers,85,89740 -Anna Jarvis,17,41181 -Nicholas Adkins,25,51529 -Michael George,61,56288 -Kristi Carroll,45,65002 -Kimberly Preston,38,42069 -Jennifer Williams,76,50188 -Walter Patterson,49,54310 -Crystal Diaz MD,80,38773 -Tammy Peters,77,87263 -Diana Valencia,74,45062 -Alan Sloan,63,88067 -Clayton Kelley,74,69500 -Brian Johnston,32,49980 -Jack Benson,10,81658 -Dana Caldwell,58,37107 -Natasha Lane,61,44108 -Jonathan Lawson,29,49105 -Crystal Murray,29,78216 -Sergio Pennington,29,83708 -Leslie Martinez,40,30884 -Susan Wallace,20,57096 -Samantha Lang,36,71994 -Cody Moore,14,77172 -Angela Harris,15,32890 -Karen Holt DDS,77,36954 -Lisa Snyder,57,41641 -Tracy Price,70,36450 -Antonio Ibarra,23,70408 -Jacob Maxwell,47,41230 -Jeffrey Dominguez,28,48229 -Jeffrey White,70,63729 -Melissa Bennett,22,67856 -Ashlee Grimes,34,77440 -Brian Torres,38,54254 -Robert Stewart,68,42998 -Anne Sanchez,72,77537 -Meghan Robinson,19,88482 -Jennifer Brown,60,37666 -Mary Reeves,60,61306 -Kelly Marsh,10,59230 -Manuel Lewis,32,81926 -Hannah Jones,65,72151 -David Clarke,24,51014 -Jeffery Garza,15,51827 -Ana Welch,23,30326 -Joshua Jordan,74,79399 -Daniel Gutierrez,95,44286 -Kimberly Roberts,60,67289 -Michele Martin,62,83242 diff --git a/data/raw/f_675_data_simon/test_data_3.csv b/data/raw/f_675_data_simon/test_data_3.csv deleted file mode 100644 index b2d74e6a..00000000 --- a/data/raw/f_675_data_simon/test_data_3.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Salary -Zachary French,78,81712 -Rachel Lee,37,41722 -John Sanchez,80,77677 -Alexandra Collins,20,63315 -Christopher Johnson,18,52712 -Angie Butler,85,85203 -Paul Harding,75,74076 -Jonathan Taylor,48,67749 -Michael Ford,35,74630 -Jason King,34,71733 -Courtney Palmer,74,83729 -Eric Scott,20,77448 -Belinda Armstrong,86,35316 -Tracy 
Estrada,48,70823 -Richard Garcia,66,70133 -Lucas Strong,29,76430 -Joseph Caldwell,87,48018 -Tiffany Garcia,45,79356 -Steven Jackson,98,88194 -Gabriel Scott,46,51982 -Emily Young,54,36775 -Jamie Jacobson,92,70871 -Hunter Mcclure,61,49881 -Stephanie Andrews,65,68438 -Denise Ingram,27,58835 -Ashley Davis,30,74229 -Veronica Morales,92,78963 -Gary Colon,52,84669 -Jacob White,65,34670 -Jennifer Peterson,17,49478 -Amber Brewer,59,39302 -Tracy Thomas,32,58883 -Jimmy Kim,70,74444 -Adrian Ward,93,33606 -Gloria Haynes,49,57099 -Mary Mendoza,65,70805 -Joshua Robinson,20,83812 -Karen Walters,79,54978 -William Collins,44,42080 -Connie Newman,19,65135 -Lucas Davis,85,56702 -Dylan Smith,32,33405 -Courtney Sanchez,98,84714 -Craig Villa,54,84622 -Eric Madden,32,89833 -Katrina Roberts,47,43998 -Alicia Hebert,77,59241 -John Lee,95,47376 -Russell Graves,24,59141 -Justin Wall,70,64565 -Paula Bowers,93,55895 -Joseph Pena,84,38624 -Wendy Vaughan,63,74102 -Rachel Martinez,41,66461 -James Reyes,75,31278 -Erica Brooks,24,61427 -Kenneth Owens,40,89289 -Angela Estrada,70,38205 -Ronald Mccarthy,85,42287 -Paul Lane,68,83036 -Don Roberts,86,77151 -Michael Franklin,88,38335 -Chelsea Ward,52,34894 -Christopher Cooper,10,32629 -Jacob Dean,76,33504 -Heather Obrien,23,87800 -Amanda Mcbride,25,41739 -Benjamin Day,18,66624 -Jeffrey Stone,26,43753 -Margaret Garcia,21,37845 -Corey Orr,21,71042 -Brian Jones,61,60522 -Gabriela Hernandez,26,59177 -Jonathan Reed,30,40145 -Carolyn Lewis,62,49523 -Jon Hester,70,76387 -Melissa Neal,57,56724 -Kenneth Patel,65,53103 -Darryl Martin,80,34274 -Victor Martin,44,31631 -Andre Jones,99,45387 -Robert Lee,17,55560 -Lisa Deleon,14,50551 -James Krueger,63,88057 -Joyce Garcia,97,40760 -Tiffany Ayala,94,30185 -Jonathan Thompson,13,41353 -Jennifer Aguirre,52,51097 -Hannah Weiss,84,44239 -Laura Woods,24,33585 -Ivan Deleon,63,53803 -Adrian Hansen,76,89684 -Mary Duncan,69,43609 -Dakota Price,64,58033 -Tasha Hodges,83,52477 -Timothy Lewis,10,37584 -Susan Roberts,76,79908 -Renee Lam,13,69759 -Leslie Hebert,52,48492 -Rebecca Glover,24,76043 diff --git a/data/raw/f_675_data_simon/test_data_4.csv b/data/raw/f_675_data_simon/test_data_4.csv deleted file mode 100644 index f74af4f5..00000000 --- a/data/raw/f_675_data_simon/test_data_4.csv +++ /dev/null @@ -1,101 +0,0 @@ -Name,Age,Salary -Shawn Fry,50,32727 -Megan Meyer,58,40297 -Micheal Calhoun,84,85596 -Taylor Horne,23,40992 -Jessica Anderson,88,68478 -Jeffrey Colon,28,48860 -Samantha King,27,44257 -Jennifer Paul,99,77466 -Ellen Cooper,27,88606 -Benjamin Jones,73,46090 -William Wilson,57,66146 -Bill Reyes,26,86501 -Patricia Wilkerson,15,33024 -Stephen Davis,91,39265 -Haley Rodriguez,73,86851 -Bryce Flynn,94,73702 -Michael Scott,59,78320 -David Hall,81,75990 -Robert Juarez,84,41063 -Brittany Shelton,75,69030 -Lindsey Richardson,63,69624 -James Nguyen,17,30463 -Amber Beltran,71,73171 -Deanna Ramirez,41,70205 -Kristine Espinoza,11,33933 -Vanessa Coffey,77,79247 -Kimberly Grimes,17,64191 -Patricia Boyle,65,33637 -Scott Michael,89,46425 -Joanna Hall,18,83439 -Jordan Anderson,48,70177 -Briana Macias,23,48960 -Jason Graham,49,70365 -Eric Mooney,82,86566 -Walter Andrade,86,38396 -Claudia Davis,92,45599 -Emily Mann,92,63379 -Steven Vargas,10,49162 -Catherine Stone,68,74923 -Gina Roberts,15,50352 -Jeremy Evans,14,40710 -Gregory Ellis,78,70605 -Carl Jacobson,73,45005 -Timothy Quinn,70,38130 -David Orr,66,30654 -Tracy Gallegos,44,84685 -Anna Freeman,31,88693 -Brian Lee,59,86321 -Brandon Simmons,28,54429 -Heather Jones,47,50876 -Debbie Garcia,60,35754 -Gary Munoz,18,36892 
-Dennis Phillips,63,35712 -Paul Ashley,86,86944 -Jon Brown,75,37997 -Tracey Webb,32,75964 -Michelle Love,39,65875 -Matthew Mayer,76,52368 -David Robinson,16,38347 -Heather Morgan,64,66170 -Tracey Greer,48,78233 -Monica Rivas,46,87990 -Elizabeth Jimenez,76,60002 -Benjamin Bautista,29,42498 -Mrs. Megan Peters DVM,64,65220 -Alexander Kennedy,80,89212 -Tony Hart,58,49180 -Daniel Burton,12,86816 -Allison Morgan,16,65167 -Elizabeth Roberts,93,77476 -Logan Moreno,82,81463 -Nicholas Wilkins,14,45723 -Sophia Charles,56,66945 -Bruce Alvarez,21,59904 -James Smith,53,54994 -Brittany Warren,95,82260 -Jessica Howard,54,69926 -Beverly Mendoza,70,58657 -Rebecca Miller,74,40616 -Andrew Fuentes,82,60854 -Pamela Manning,39,53461 -Angela Rasmussen,42,81042 -Julie Whitney,80,87389 -Nicholas Delgado,86,31235 -William Peters,61,67671 -Laura Short DVM,49,76789 -Hayden Johnson,29,52787 -Janice King,53,75762 -Cheryl Ramos,27,41826 -Nicholas Harris,71,46604 -Zachary Fields,80,40208 -Sara Duran,47,60924 -Elizabeth Miller,79,31718 -Timothy Frank,96,82801 -James White,35,33337 -Deborah Jackson,49,39165 -Kimberly Henry,46,44111 -Robin Adams,96,59455 -Cameron Bennett,18,60365 -Danielle Kelly,16,62127 diff --git a/data/raw/f_675_data_simon/test_empty.csv b/data/raw/f_675_data_simon/test_empty.csv deleted file mode 100644 index b4432d12..00000000 --- a/data/raw/f_675_data_simon/test_empty.csv +++ /dev/null @@ -1 +0,0 @@ -Name,Age,Salary diff --git a/data/raw/f_675_data_simon/test_non_numeric.csv b/data/raw/f_675_data_simon/test_non_numeric.csv deleted file mode 100644 index 268405f4..00000000 --- a/data/raw/f_675_data_simon/test_non_numeric.csv +++ /dev/null @@ -1,3 +0,0 @@ -Name,Genre -Simon, Rock -Angelor, Metal \ No newline at end of file diff --git a/data/raw/f_675_data_simon/test_res_0.csv b/data/raw/f_675_data_simon/test_res_0.csv deleted file mode 100644 index eaecb09d..00000000 --- a/data/raw/f_675_data_simon/test_res_0.csv +++ /dev/null @@ -1,101 +0,0 @@ -,Name,Age,Salary -0,Olivia Anderson,0.10465116279069767,0.8222975321515467 -1,Mr. 
Joshua Casey,0.05813953488372092,0.298905109489051 -2,Anthony Floyd,0.9767441860465116,0.6565519638512338 -3,Brian Gallegos,0.6162790697674418,0.19994786235662143 -4,Spencer Lawrence,0.3372093023255814,0.3341849148418491 -5,Tracey Sandoval,0.7325581395348837,0.9319951338199512 -6,Michael Stout,0.4418604651162791,0.39636774417796306 -7,Julian Matthews,0.9651162790697674,0.9000868960722974 -8,Dustin Ortiz,0.686046511627907,0.08877302745915883 -9,Ashley Murray,0.6395348837209303,0.816440736878693 -10,Austin Goodman,0.6046511627906976,0.5664581160931526 -11,Erica Brown,0.4767441860465116,0.5011296489398678 -12,Alexandria Smith,0.023255813953488358,0.8988182134167535 -13,Roberta Ruiz,0.6395348837209303,0.3848974626346888 -14,Charles Anderson,0.18604651162790697,0.8379388251651025 -15,Ashley Wang,0.5348837209302325,0.3916405978449774 -16,Kelly Padilla,0.18604651162790697,0.3705769899200556 -17,Joseph Meyer,0.4883720930232558,0.7566909975669098 -18,Charles Brown,0.05813953488372092,0.0 -19,Edward Conley,0.6976744186046512,0.9340980187695516 -20,Tyler Jacobs,0.5697674418604651,0.2930309350017378 -21,Allen Edwards,0.046511627906976744,0.18828640945429265 -22,Cynthia Flores,0.7209302325581395,0.7585679527285365 -23,Ryan Alvarado,0.872093023255814,0.35973236009732357 -24,Cheryl Short,0.3372093023255814,0.1390858533194299 -25,John Sanford,0.19767441860465118,0.9050747306221758 -26,Christopher Kidd,0.4883720930232558,0.7995481404240528 -27,Sherri Ali,1.0,0.7767118526242613 -28,Roberta Jones,0.9883720930232558,0.13288147375738613 -29,Todd Norman,0.20930232558139533,0.7250608272506083 -30,Stephanie Gutierrez,0.6744186046511628,0.24624608967674655 -31,Faith Roy,0.5116279069767442,0.0576642335766423 -32,Victor Neal,0.8255813953488372,0.2768508863399374 -33,Janet Williams,0.6162790697674418,0.9355404935696905 -34,Shane Whitehead,0.7558139534883721,0.27181091414668057 -35,Richard Russell Jr.,0.39534883720930236,0.09600278067431345 -36,Christopher White,0.8255813953488372,0.2545012165450121 -37,Christopher Richards,0.034883720930232565,0.4676572818908584 -38,Barbara Butler,0.41860465116279066,0.9180917622523461 -39,Brianna Lopez,0.6046511627906976,0.9665797705943691 -40,Kim Malone,0.37209302325581395,0.17820646506777893 -41,Mandy Bird,0.0697674418604651,0.30785540493569685 -42,Susan Robertson,0.813953488372093,0.7224887035106011 -43,Roger Smith,0.5930232558139534,0.7728189085853319 -44,Thomas Chandler,0.6395348837209303,0.14233576642335766 -45,Timothy Parker,0.313953488372093,0.8157629475147723 -46,Chad Mann,0.12790697674418605,0.9107055961070558 -47,Shannon Miller,0.686046511627907,0.6047792839763642 -48,Megan Castillo,0.6511627906976744,0.7577511296489399 -49,Eric Watts,0.41860465116279066,0.24721932568647897 -50,Eric Donaldson,0.627906976744186,0.17387904066736182 -51,Theodore White,0.20930232558139533,0.641970802919708 -52,Rachel Barr,0.09302325581395349,0.46664928745220713 -53,Kathleen Mays,0.6627906976744186,0.33010079944386506 -54,Victor Turner,0.5,0.32036843934654147 -55,Jeffrey Myers,0.6162790697674418,0.21209593326381648 -56,Michelle Norris,0.3023255813953488,0.6438129996524156 -57,Christopher Hayes,0.9186046511627908,0.9880778588807784 -58,Eric Miller,0.9767441860465116,0.167744177963156 -59,Heather Lopez,0.7558139534883721,0.16385123392422662 -60,Emily Bray,0.2906976744186046,0.2035453597497393 -61,Trevor Rogers,0.627906976744186,0.7547097671185261 -62,Valerie Campbell,0.36046511627906974,0.5518074383037885 -63,Danielle Bond,0.7093023255813953,0.9999999999999998 -64,Glenda 
Macias,0.4534883720930233,0.9653110879388251 -65,Tonya Paul,0.9186046511627908,0.04819256169621133 -66,Derrick Sandoval,0.6162790697674418,0.23696558915537014 -67,Pamela Morales,0.17441860465116277,0.3471498088286409 -68,Barbara Elliott,0.6511627906976744,0.8470802919708029 -69,Andrew Schroeder,0.8488372093023255,0.5769725408411539 -70,Eileen Anderson,0.9069767441860466,0.1607403545359749 -71,Virginia Mckee,0.6976744186046512,0.7106360792492179 -72,Jenna Johnson,0.5813953488372093,0.800486618004866 -73,Adam Day,0.9302325581395348,0.1610705596107056 -74,Cheryl Perez,0.8023255813953488,0.3512339242266249 -75,Matthew Ortiz,0.9069767441860466,0.11468543621828287 -76,Jamie Sims,0.9883720930232558,0.9742266249565519 -77,Megan Cole,0.7093023255813953,0.6138512339242266 -78,Natasha Stanley,0.9883720930232558,0.3446472019464719 -79,Joseph Chambers,0.38372093023255816,0.8369482099409107 -80,Jordan Patton,0.941860465116279,0.776155717761557 -81,Jennifer Martinez,0.941860465116279,0.6455683003128259 -82,Mr. Timothy Watson,0.9767441860465116,0.6779805352798052 -83,Gabriel Bender,0.38372093023255816,0.9485401459854013 -84,Michael Wheeler,0.0,0.11320820298922485 -85,Brian Landry,0.7441860465116279,0.2762773722627737 -86,Theresa Simon,0.5581395348837209,0.11920403197775453 -87,Mark Hansen,0.36046511627906974,0.9029023288147375 -88,James Turner,0.37209302325581395,0.6626868265554395 -89,Morgan Lewis,0.18604651162790697,0.09158846020159883 -90,Anthony Young,0.6162790697674418,0.106934306569343 -91,James Rivera,0.41860465116279066,0.8828467153284671 -92,William Jones,0.09302325581395349,0.41253041362530407 -93,Mckenzie White,0.0697674418604651,0.17573861661452894 -94,Patrick Wiggins,0.8255813953488372,0.6012165450121654 -95,Sarah Nelson,0.7558139534883721,0.9123913799096279 -96,Gregory Contreras,0.7674418604651163,0.8870003475842891 -97,Lawrence Rowland,0.6511627906976744,0.7417101147028153 -98,Gabriella Banks,0.11627906976744184,0.6841327771984707 -99,Logan Brown,0.8488372093023255,0.7396246089676746 diff --git a/data/raw/f_675_data_simon/test_res_1.csv b/data/raw/f_675_data_simon/test_res_1.csv deleted file mode 100644 index 7e5ef9df..00000000 --- a/data/raw/f_675_data_simon/test_res_1.csv +++ /dev/null @@ -1,101 +0,0 @@ -,Name,Age,Salary -0,Logan Martinez,0.20454545454545453,0.28724292200854706 -1,Travis Copeland,0.6818181818181819,0.9703692574786325 -2,Ashley Jennings,0.8295454545454546,0.4748096955128206 -3,Mark Horton,0.05681818181818182,0.04473824786324787 -4,Zachary Hernandez,0.5113636363636364,0.39145966880341887 -5,James Peters,0.13636363636363635,0.7768596420940173 -6,Chelsea Rodriguez,0.7840909090909092,0.062049278846153855 -7,Angela Grimes,0.4318181818181819,0.8663194444444444 -8,Rebecca Herrera,0.375,0.586338141025641 -9,Dawn Flynn,0.8977272727272727,1.0 -10,Jennifer Thompson,1.0,0.22123731303418803 -11,Robert Williams,0.6136363636363636,0.22252270299145305 -12,Michael Morrow,0.8977272727272727,0.9533253205128205 -13,Ian Short,0.0,0.6000434027777779 -14,Ms. 
Victoria Vaughn,0.03409090909090909,0.28587406517094016 -15,Steven Joseph,0.7954545454545455,0.27041599893162394 -16,Amber Mcintosh,0.7954545454545455,0.03996394230769229 -17,Erin Robinson,0.29545454545454547,0.47200520833333337 -18,Donald Blackwell,0.8295454545454546,0.09778979700854695 -19,Connor Martinez,0.7954545454545455,0.9298711271367521 -20,Heather Rodriguez,0.875,0.9288194444444444 -21,David Mathews,0.03409090909090909,0.8312967414529915 -22,Alexis Taylor,0.7613636363636364,0.6302584134615385 -23,Heather Rodriguez,0.625,0.9554286858974359 -24,Penny Clements,0.7840909090909092,0.06705729166666663 -25,Michael Shaw,0.5227272727272727,0.8319811698717949 -26,Brandi Lopez,0.3977272727272727,0.9686498397435899 -27,Caitlin Phelps,0.7840909090909092,0.10586939102564097 -28,Renee Johnson,0.9886363636363638,0.8024338942307694 -29,Joseph Burgess,0.9886363636363638,0.2308526976495726 -30,Jason Mitchell,0.05681818181818182,0.8487413194444444 -31,Brian Sawyer,0.13636363636363635,0.5269764957264957 -32,Jordan Jones,0.3181818181818182,0.8194277510683761 -33,Scott Gross,0.75,0.3090277777777778 -34,Holly Johnson,0.4204545454545454,0.9877971420940173 -35,Alicia Johnson,0.17045454545454547,0.41154180021367526 -36,Kenneth Ayala,0.011363636363636354,0.39830395299145305 -37,David Brown,0.11363636363636365,0.4412560096153847 -38,John Wagner,0.10227272727272729,0.26220285790598297 -39,Melissa Wood,0.011363636363636354,0.7014055822649574 -40,Joel Larson,0.13636363636363635,0.7501669337606838 -41,Regina Aguirre,0.875,0.6367521367521367 -42,Rose Dean,0.6704545454545455,0.30819310897435903 -43,Robert Munoz,0.9204545454545454,0.7743055555555556 -44,Tony Herman,0.6022727272727273,0.44174011752136755 -45,Laura Williamson,0.9659090909090908,0.46704727564102566 -46,Rachel Ramirez,0.7272727272727273,0.1428118322649573 -47,Anthony Mcmillan,0.26136363636363635,0.4316072382478633 -48,Kelly Tucker,0.9659090909090908,0.046424278846153855 -49,Jonathan Myers,0.4318181818181819,0.44140625 -50,Carrie Le,0.05681818181818182,0.9259815705128205 -51,Diane Park,0.21590909090909094,0.12506677350427353 -52,Melissa Rodriguez,0.48863636363636365,0.0543369391025641 -53,Justin Frost,0.46590909090909094,0.4684495192307693 -54,Margaret May,0.2272727272727273,0.7830194978632479 -55,Laura Williams,0.4545454545454546,0.678218482905983 -56,Jeffery Mills,0.7272727272727273,0.26650974893162394 -57,Joshua Roberts,0.3181818181818182,0.7247262286324787 -58,Christina Valencia,0.3522727272727273,0.6882512019230769 -59,John Underwood,0.2840909090909091,0.8633313301282053 -60,Rebecca Marshall,0.6818181818181819,0.11690371260683763 -61,Ashley Hoffman,0.8977272727272727,0.4736411591880342 -62,Kelsey Dorsey DDS,0.875,0.03342013888888884 -63,Jacqueline Flores,0.625,0.6021300747863247 -64,Gabriela Cruz,0.9886363636363638,0.9409388354700856 -65,Benjamin Ryan,0.3181818181818182,0.9863782051282053 -66,Mark Peterson,0.34090909090909094,0.23494257478632474 -67,Carrie Barnett,0.4204545454545454,0.0 -68,Crystal Wright,0.36363636363636365,0.25901442307692313 -69,Stephanie Hull,0.625,0.44260817307692313 -70,Jonathan Tyler,0.8863636363636365,0.7351929754273505 -71,Randy Tate,0.17045454545454547,0.03460536858974361 -72,Jennifer Bell,0.6477272727272727,0.8413294604700856 -73,Mr. 
Bryan Bryant,0.011363636363636354,0.693442841880342 -74,Gina Jordan,0.6022727272727273,0.1271868322649573 -75,Kenneth Garcia,0.7613636363636364,0.34670472756410253 -76,Steven Flynn,0.4772727272727273,0.80078125 -77,Rachael Mendoza,0.9318181818181819,0.5338040865384617 -78,Raymond Morrow,0.3295454545454546,0.2845385950854701 -79,Megan White,0.04545454545454547,0.38080929487179493 -80,Nicholas Ruiz,0.5,0.3116486378205129 -81,Matthew Kelly,0.7045454545454546,0.917033920940171 -82,Curtis Martin,0.9772727272727273,0.3687065972222222 -83,Anna Herman,0.8295454545454546,0.9113247863247864 -84,Brian Ford,0.8068181818181819,0.9851095085470087 -85,Michael Sutton,0.07954545454545456,0.8482405181623933 -86,Stacey Ruiz,0.40909090909090906,0.7560930822649574 -87,Amy Sanchez,0.625,0.3823784722222222 -88,Kyle Fox,0.23863636363636365,0.16092414529914534 -89,Mr. Michael Harrington,0.8863636363636365,0.6879674145299146 -90,David Stark,0.022727272727272735,0.8139022435897436 -91,Brian Nichols,0.3522727272727273,0.6719751602564104 -92,Paul Ferguson,0.8181818181818182,0.9540264423076923 -93,April Williams,0.4204545454545454,0.14561631944444442 -94,Jack Jarvis,0.9659090909090908,0.8363381410256412 -95,Charles Lewis,0.375,0.20037059294871795 -96,James Martinez,0.011363636363636354,0.6239149305555556 -97,Victoria Martinez,0.2272727272727273,0.6354333600427351 -98,Dawn Robinson,0.6477272727272727,0.7451923076923077 -99,Susan Richardson,0.8977272727272727,0.40364583333333337 diff --git a/data/raw/f_675_data_simon/test_res_2.csv b/data/raw/f_675_data_simon/test_res_2.csv deleted file mode 100644 index 58563ab8..00000000 --- a/data/raw/f_675_data_simon/test_res_2.csv +++ /dev/null @@ -1,101 +0,0 @@ -,Name,Age,Salary -0,Jessica Valdez,0.033707865168539325,0.6486119493059747 -1,Justin Hall,0.6741573033707864,0.8222523972373096 -2,James Williams,0.8651685393258426,0.7656239522564204 -3,Monica Hunter,0.41573033707865165,0.4219808221015221 -4,Christopher Martin,0.29213483146067415,0.8792999396499698 -5,Christine Smith,0.5730337078651686,0.0569972507208476 -6,Emily Rasmussen,0.24719101123595505,0.975340307114598 -7,William Melton,0.0,0.12163883859719704 -8,Jackie Roberson,0.011235955056179775,0.3815127740897204 -9,Howard Taylor,0.14606741573033705,0.3605411386039026 -10,Nicole Dunn,0.6629213483146068,0.511751491986857 -11,Anna Franklin,0.11235955056179775,0.8456715617246696 -12,Jeffrey Davis,0.6179775280898876,0.4520384899081338 -13,Donald Stone,0.6067415730337078,0.6071213035606516 -14,Emily Huang,0.40449438202247184,0.028917722792194733 -15,Paula Sexton,0.056179775280898875,0.8133675316837657 -16,Lauren Carter,0.9213483146067414,0.3787132032454905 -17,Wendy Sullivan,0.7752808988764044,0.513712867967545 -18,Mrs. Nancy Larson,0.02247191011235955,0.09922550794608731 -19,Kimberly Gaines,0.033707865168539325,0.7580969623818145 -20,Michael Chandler,0.6629213483146068,0.16566083283041644 -21,Jordan Hall,0.6179775280898876,0.5375511298866761 -22,Jeffrey Franklin,0.550561797752809,0.7070676590893851 -23,Brian Adams,0.3146067415730337,0.5120029504459197 -24,Michael Holloway,0.8314606741573034,0.8294944008583115 -25,Tami Rodriguez,0.6853932584269662,0.4865050626969758 -26,Dr. 
Adam Owen,0.49438202247191004,0.817776436665996 -27,Kevin Bradley,0.20224719101123595,0.2564205726547307 -28,Connor White,0.14606741573033705,0.20462013008784286 -29,Timothy Carney,0.8651685393258426,0.9954234560450613 -30,Sandra Murphy,0.797752808988764,0.5070911285455643 -31,Rhonda Castillo,0.16853932584269665,0.2973077180983035 -32,Gabriel Mccall,0.30337078651685395,0.7663280359417958 -33,Monica Peterson,0.15730337078651685,0.6581841346476228 -34,Alicia Hurley,0.25842696629213485,0.8142392543418494 -35,Rebecca Vincent PhD,0.44943820224719105,0.46591899684838733 -36,David Patrick,0.14606741573033705,0.8684201703211961 -37,Tanya Cuevas,0.7640449438202246,0.5447093140213236 -38,Christopher Davis,0.7752808988764044,0.6027962180647756 -39,Robert Nichols,0.3146067415730337,0.28361161402802926 -40,Adam Ross,0.48314606741573035,0.6984007242003621 -41,Lauren Beard,0.9775280898876404,0.9040769798162677 -42,Richard Moore,0.07865168539325842,0.36642526654596663 -43,Jordan Mitchell,0.25842696629213485,0.1439851136592235 -44,Abigail Mitchell,1.0,0.9999999999999999 -45,Joshua Bailey,0.7865168539325842,0.39133641789043117 -46,William Mathis,0.41573033707865165,0.29288204921880234 -47,Courtney Padilla,0.21348314606741575,0.06408837926641187 -48,Joshua Gardner,0.6067415730337078,0.6152350298397372 -49,Kevin Pena,0.49438202247191004,0.8910011399450143 -50,Alexander Summers,0.8426966292134832,0.9960101924495407 -51,Anna Jarvis,0.07865168539325842,0.1819721048749413 -52,Nicholas Adkins,0.16853932584269665,0.355444913833568 -53,Michael George,0.5730337078651686,0.4352243009454838 -54,Kristi Carroll,0.39325842696629215,0.581304901763562 -55,Kimberly Preston,0.3146067415730337,0.19685844565144506 -56,Jennifer Williams,0.7415730337078652,0.3329645275933749 -57,Walter Patterson,0.43820224719101125,0.4020653121437672 -58,Crystal Diaz MD,0.7865168539325842,0.14160464024676456 -59,Tammy Peters,0.752808988764045,0.9544860189096761 -60,Diana Valencia,0.7191011235955056,0.2470327901830618 -61,Alan Sloan,0.595505617977528,0.9679641923154295 -62,Clayton Kelley,0.7191011235955056,0.6567089116877892 -63,Brian Johnston,0.24719101123595505,0.3294776369610407 -64,Jack Benson,0.0,0.8605243747066317 -65,Dana Caldwell,0.5393258426966292,0.11367598739354923 -66,Natasha Lane,0.5730337078651686,0.2310400321866828 -67,Jonathan Lawson,0.21348314606741575,0.3148092268490579 -68,Crystal Murray,0.21348314606741575,0.802823040300409 -69,Sergio Pennington,0.21348314606741575,0.8948903641118487 -70,Leslie Martinez,0.33707865168539325,0.00935425467712736 -71,Susan Wallace,0.11235955056179775,0.4487695299403205 -72,Samantha Lang,0.29213483146067415,0.6985180714812579 -73,Cody Moore,0.0449438202247191,0.7853215315496546 -74,Angela Harris,0.056179775280898875,0.042982632602427406 -75,Karen Holt DDS,0.752808988764045,0.11111111111111105 -76,Lisa Snyder,0.5280898876404494,0.18968349761952652 -77,Tracy Price,0.6741573033707864,0.10266210688660904 -78,Antonio Ibarra,0.14606741573033705,0.6719305304097095 -79,Jacob Maxwell,0.41573033707865165,0.18279353584121238 -80,Jeffrey Dominguez,0.20224719101123595,0.30012405283980415 -81,Jeffrey White,0.6741573033707864,0.5599644605377857 -82,Melissa Bennett,0.1348314606741573,0.6291490645745322 -83,Ashlee Grimes,0.2696629213483146,0.7898142560182392 -84,Brian Torres,0.3146067415730337,0.40112653389660025 -85,Robert Stewart,0.651685393258427,0.21243210621605313 -86,Anne Sanchez,0.696629213483146,0.7914403540535103 -87,Meghan Robinson,0.10112359550561797,0.9749212096828269 -88,Jennifer 
Brown,0.5617977528089888,0.12304700596794738 -89,Mary Reeves,0.5617977528089888,0.5193455374505466 -90,Kelly Marsh,0.0,0.4845436867162878 -91,Manuel Lewis,0.24719101123595505,0.8650170991752163 -92,Hannah Jones,0.6179775280898876,0.7011500033527794 -93,David Clarke,0.15730337078651685,0.3468115067390867 -94,Jeffery Garza,0.056179775280898875,0.3604405552202776 -95,Ana Welch,0.14606741573033705,0.0 -96,Joshua Jordan,0.7191011235955056,0.8226547307718098 -97,Daniel Gutierrez,0.9550561797752808,0.23402400590089179 -98,Kimberly Roberts,0.5617977528089888,0.6196439348219674 -99,Michele Martin,0.5842696629213482,0.8870783879836385 diff --git a/data/raw/f_675_data_simon/test_res_3.csv b/data/raw/f_675_data_simon/test_res_3.csv deleted file mode 100644 index 3fe5df45..00000000 --- a/data/raw/f_675_data_simon/test_res_3.csv +++ /dev/null @@ -1,101 +0,0 @@ -,Name,Age,Salary -0,Zachary French,0.7640449438202246,0.8638512607296137 -1,Rachel Lee,0.30337078651685395,0.19341805257510725 -2,John Sanchez,0.7865168539325842,0.7962043991416309 -3,Alexandra Collins,0.11235955056179775,0.5554251609442059 -4,Christopher Johnson,0.0898876404494382,0.37766563841201717 -5,Angie Butler,0.8426966292134832,0.9223779506437767 -6,Paul Harding,0.7303370786516854,0.7358335568669528 -7,Jonathan Taylor,0.42696629213483145,0.6297612660944206 -8,Michael Ford,0.28089887640449435,0.7451213787553647 -9,Jason King,0.2696629213483146,0.6965531115879827 -10,Courtney Palmer,0.7191011235955056,0.8976663090128755 -11,Eric Scott,0.11235955056179775,0.7923652092274677 -12,Belinda Armstrong,0.8539325842696628,0.0860213251072961 -13,Tracy Estrada,0.42696629213483145,0.6812969420600857 -14,Richard Garcia,0.6292134831460674,0.6697290772532188 -15,Lucas Strong,0.21348314606741575,0.7752984173819742 -16,Joseph Caldwell,0.8651685393258426,0.29897062768240334 -17,Tiffany Garcia,0.39325842696629215,0.8243528701716737 -18,Steven Jackson,0.9887640449438202,0.9725221298283261 -19,Gabriel Scott,0.40449438202247184,0.36542717274678105 -20,Emily Young,0.49438202247191004,0.11048149141630892 -21,Jamie Jacobson,0.9213483146067414,0.6821016630901288 -22,Hunter Mcclure,0.5730337078651686,0.33020386266094415 -23,Stephanie Andrews,0.6179775280898876,0.6413123658798282 -24,Denise Ingram,0.19101123595505615,0.4803178648068669 -25,Ashley Davis,0.2247191011235955,0.7383986051502146 -26,Veronica Morales,0.9213483146067414,0.8177642167381974 -27,Gary Colon,0.47191011235955055,0.9134254291845493 -28,Jacob White,0.6179775280898876,0.07519112124463512 -29,Jennifer Peterson,0.07865168539325842,0.3234475590128755 -30,Amber Brewer,0.550561797752809,0.1528467006437768 -31,Tracy Thomas,0.24719101123595505,0.4811225858369098 -32,Jimmy Kim,0.6741573033707864,0.7420030847639484 -33,Adrian Ward,0.9325842696629212,0.05735313841201717 -34,Gloria Haynes,0.43820224719101125,0.45121378755364805 -35,Mary Mendoza,0.6179775280898876,0.6809951716738197 -36,Joshua Robinson,0.11235955056179775,0.8990578057939914 -37,Karen Walters,0.7752808988764044,0.41565517703862653 -38,William Collins,0.38202247191011235,0.1994199302575107 -39,Connie Newman,0.10112359550561797,0.5859374999999999 -40,Lucas Davis,0.8426966292134832,0.44455807403433467 -41,Dylan Smith,0.24719101123595505,0.053983369098712375 -42,Courtney Sanchez,0.9887640449438202,0.9141798551502146 -43,Craig Villa,0.49438202247191004,0.9126374731759656 -44,Eric Madden,0.24719101123595505,0.9999999999999999 -45,Katrina Roberts,0.41573033707865165,0.23157524141630892 -46,Alicia Hebert,0.752808988764045,0.48712446351931327 -47,John 
Lee,0.9550561797752808,0.2882074839055794 -48,Russell Graves,0.15730337078651685,0.4854479613733905 -49,Justin Wall,0.6741573033707864,0.5763814377682402 -50,Paula Bowers,0.9325842696629212,0.4310287017167381 -51,Joseph Pena,0.8314606741573034,0.1414800160944205 -52,Wendy Vaughan,0.595505617977528,0.7362694474248926 -53,Rachel Martinez,0.34831460674157305,0.6081679184549357 -54,James Reyes,0.7303370786516854,0.018324168454935563 -55,Erica Brooks,0.15730337078651685,0.5237728004291845 -56,Kenneth Owens,0.33707865168539325,0.9908798283261803 -57,Angela Estrada,0.6741573033707864,0.13445547210300424 -58,Ronald Mccarthy,0.8426966292134832,0.2028902896995708 -59,Paul Lane,0.651685393258427,0.8860481491416309 -60,Don Roberts,0.8539325842696628,0.7873859978540773 -61,Michael Franklin,0.8764044943820224,0.1366349248927038 -62,Chelsea Ward,0.47191011235955055,0.07894648605150212 -63,Christopher Cooper,0.0,0.04097371244635184 -64,Jacob Dean,0.7415730337078652,0.05564310622317592 -65,Heather Obrien,0.14606741573033705,0.9659167113733905 -66,Amanda Mcbride,0.16853932584269665,0.19370305793991416 -67,Benjamin Day,0.0898876404494382,0.6109006169527896 -68,Jeffrey Stone,0.1797752808988764,0.22746781115879822 -69,Margaret Garcia,0.12359550561797752,0.12842006437768239 -70,Corey Orr,0.12359550561797752,0.6849684817596565 -71,Brian Jones,0.5730337078651686,0.5086004560085836 -72,Gabriela Hernandez,0.1797752808988764,0.48605150214592274 -73,Jonathan Reed,0.2247191011235955,0.16697961373390557 -74,Carolyn Lewis,0.5842696629213482,0.3242019849785407 -75,Jon Hester,0.6741573033707864,0.7745775214592273 -76,Melissa Neal,0.5280898876404494,0.4449269045064377 -77,Kenneth Patel,0.6179775280898876,0.38422076180257503 -78,Darryl Martin,0.7865168539325842,0.06855217274678105 -79,Victor Martin,0.38202247191011235,0.02424222103004292 -80,Andre Jones,1.0,0.2548618562231759 -81,Robert Lee,0.07865168539325842,0.425412419527897 -82,Lisa Deleon,0.0449438202247191,0.34143642703862653 -83,James Krueger,0.595505617977528,0.9702253218884119 -84,Joyce Garcia,0.9775280898876404,0.17729010193133043 -85,Tiffany Ayala,0.943820224719101,0.0 -86,Jonathan Thompson,0.033707865168539325,0.18723175965665229 -87,Jennifer Aguirre,0.47191011235955055,0.35059012875536477 -88,Hannah Weiss,0.8314606741573034,0.23561561158798283 -89,Laura Woods,0.15730337078651685,0.057001072961373356 -90,Ivan Deleon,0.595505617977528,0.39595627682403434 -91,Adrian Hansen,0.7415730337078652,0.9975020118025751 -92,Mary Duncan,0.6629213483146068,0.22505364806866945 -93,Dakota Price,0.6067415730337078,0.46687231759656644 -94,Tasha Hodges,0.8202247191011236,0.37372585836909866 -95,Timothy Lewis,0.0,0.12404439377682397 -96,Susan Roberts,0.7415730337078652,0.8336071620171673 -97,Renee Lam,0.033707865168539325,0.6634589592274677 -98,Leslie Hebert,0.47191011235955055,0.30691724785407726 -99,Rebecca Glover,0.15730337078651685,0.7688103540772532 diff --git a/data/raw/f_675_data_simon/test_res_4.csv b/data/raw/f_675_data_simon/test_res_4.csv deleted file mode 100644 index 58cf1c79..00000000 --- a/data/raw/f_675_data_simon/test_res_4.csv +++ /dev/null @@ -1,101 +0,0 @@ -,Name,Age,Salary -0,Shawn Fry,0.44943820224719105,0.0385368261587431 -1,Megan Meyer,0.5393258426966292,0.16739008323545923 -2,Micheal Calhoun,0.8314606741573034,0.9384500161704878 -3,Taylor Horne,0.14606741573033705,0.17922007183100985 -4,Jessica Anderson,0.8764044943820224,0.647074843827129 -5,Jeffrey Colon,0.20224719101123595,0.31314575567243685 -6,Samantha King,0.19101123595505615,0.23479548588061072 
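An aside on the expected-result fixtures being deleted here: the exact 0.0 minima and the occasional 0.9999999999999999 maxima (rather than a clean 1.0) are normal output for min-max scaling. MinMaxScaler multiplies by a precomputed reciprocal of the column range instead of dividing by the range directly, and that multiply can land one ulp short of 1. A dependency-free illustration, not part of the patch (49.0 is just a convenient example of a range whose reciprocal is inexact in binary):

span = 49.0                 # stand-in for a column's (max - min)
print(span * (1.0 / span))  # 0.9999999999999999 -- multiply by a precomputed reciprocal
print(span / span)          # 1.0 -- direct division would be exact here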
-7,Jennifer Paul,1.0,0.8000646819520332 -8,Ellen Cooper,0.19101123595505615,0.9896849308073328 -9,Benjamin Jones,0.7078651685393258,0.26599601695347996 -10,William Wilson,0.5280898876404494,0.6073805511583175 -11,Bill Reyes,0.1797752808988764,0.9538545336941904 -12,Patricia Wilkerson,0.056179775280898875,0.04359223135712942 -13,Stephen Davis,0.9101123595505618,0.149823826788541 -14,Haley Rodriguez,0.7078651685393258,0.9598120819077771 -15,Bryce Flynn,0.943820224719101,0.7359955063064902 -16,Michael Scott,0.550561797752809,0.8146010995931845 -17,David Hall,0.797752808988764,0.7749408500570221 -18,Robert Juarez,0.8314606741573034,0.1804286030400517 -19,Brittany Shelton,0.7303370786516854,0.6564707484382712 -20,Lindsey Richardson,0.595505617977528,0.6665815588350439 -21,James Nguyen,0.07865168539325842,0.0 -22,Amber Beltran,0.6853932584269662,0.7269570545881632 -23,Deanna Ramirez,0.34831460674157305,0.6764710888695977 -24,Kristine Espinoza,0.011235955056179775,0.059064835146130035 -25,Vanessa Coffey,0.752808988764045,0.8303800915760267 -26,Kimberly Grimes,0.07865168539325842,0.5741033889938552 -27,Patricia Boyle,0.6179775280898876,0.05402645151406826 -28,Scott Michael,0.8876404494382022,0.2716982416721986 -29,Joanna Hall,0.0898876404494382,0.9017344976084698 -30,Jordan Anderson,0.42696629213483145,0.6759944850125108 -31,Briana Macias,0.14606741573033705,0.3148479123048902 -32,Jason Graham,0.43820224719101125,0.6791945394815231 -33,Eric Mooney,0.8089887640449438,0.954960935505285 -34,Walter Andrade,0.8539325842696628,0.13503208565252167 -35,Claudia Davis,0.9213483146067414,0.2576384278881342 -36,Emily Mann,0.9213483146067414,0.5602818771383342 -37,Steven Vargas,0.0,0.3182862687024459 -38,Catherine Stone,0.651685393258427,0.7567788387887453 -39,Gina Roberts,0.056179775280898875,0.3385419326286404 -40,Jeremy Evans,0.0449438202247191,0.17441999012749143 -41,Gregory Ellis,0.7640449438202246,0.683279715399411 -42,Carl Jacobson,0.7078651685393258,0.24752761749136154 -43,Timothy Quinn,0.6741573033707864,0.1305043490101958 -44,David Orr,0.6292134831460674,0.0032511191679858165 -45,Tracy Gallegos,0.38202247191011235,0.9229433692488382 -46,Anna Freeman,0.23595505617977525,0.9911658070775671 -47,Brian Lee,0.550561797752809,0.9507906517557745 -48,Brandon Simmons,0.20224719101123595,0.4079388585337622 -49,Heather Jones,0.41573033707865165,0.34746123338269586 -50,Debbie Garcia,0.5617977528089888,0.09006110742310502 -51,Gary Munoz,0.0898876404494382,0.10943164990042376 -52,Dennis Phillips,0.595505617977528,0.08934620163747464 -53,Paul Ashley,0.8539325842696628,0.9613950875759587 -54,Jon Brown,0.7303370786516854,0.12824048068903293 -55,Tracey Webb,0.24719101123595505,0.7744982893325842 -56,Michelle Love,0.32584269662921345,0.6027677066843691 -57,Matthew Mayer,0.7415730337078652,0.3728574103388993 -58,David Robinson,0.06741573033707865,0.13419802890261956 -59,Heather Morgan,0.6067415730337078,0.6077890687501064 -60,Tracey Greer,0.42696629213483145,0.81312022332295 -61,Monica Rivas,0.40449438202247184,0.9791996459514203 -62,Elizabeth Jimenez,0.7415730337078652,0.5028000476603856 -63,Benjamin Bautista,0.21348314606741575,0.20485455071575676 -64,Mrs. 
Megan Peters DVM,0.6067415730337078,0.5916185807417997 -65,Alexander Kennedy,0.7865168539325842,1.0 -66,Tony Hart,0.5393258426966292,0.3185926568962876 -67,Daniel Burton,0.02247191011235955,0.9592163270864185 -68,Allison Morgan,0.06741573033707865,0.5907164377265994 -69,Elizabeth Roberts,0.9325842696629212,0.8002348976152784 -70,Logan Moreno,0.8089887640449438,0.8680998825511923 -71,Nicholas Wilkins,0.0449438202247191,0.25974910211237634 -72,Sophia Charles,0.5168539325842696,0.6209807826516196 -73,Bruce Alvarez,0.12359550561797752,0.5011319341605813 -74,James Smith,0.48314606741573035,0.4175560435071235 -75,Brittany Warren,0.9550561797752808,0.8816660709118451 -76,Jessica Howard,0.49438202247191004,0.671722071865053 -77,Beverly Mendoza,0.6741573033707864,0.47990604095388845 -78,Rebecca Miller,0.7191011235955056,0.1728199628929854 -79,Andrew Fuentes,0.8089887640449438,0.517302422168888 -80,Pamela Manning,0.32584269662921345,0.3914619823316141 -81,Angela Rasmussen,0.3595505617977528,0.8609338031285638 -82,Julie Whitney,0.7865168539325842,0.9689696845903759 -83,Nicholas Delgado,0.8539325842696628,0.013140649202539545 -84,William Peters,0.5730337078651686,0.6333384398032307 -85,Laura Short DVM,0.43820224719101125,0.7885410815503242 -86,Hayden Johnson,0.21348314606741575,0.3799894466288787 -87,Janice King,0.48314606741573035,0.7710599329350285 -88,Cheryl Ramos,0.19101123595505615,0.19341605814567053 -89,Nicholas Harris,0.6853932584269662,0.27474510204429003 -90,Zachary Fields,0.7865168539325842,0.16587516383257583 -91,Sara Duran,0.41573033707865165,0.5184939318116053 -92,Elizabeth Miller,0.7752808988764044,0.02136206573728905 -93,Timothy Frank,0.9662921348314606,0.8908747382934177 -94,James White,0.28089887640449435,0.0489199816167083 -95,Deborah Jackson,0.43820224719101125,0.14812167015608768 -96,Kimberly Henry,0.40449438202247184,0.23231033719722882 -97,Robin Adams,0.9662921348314606,0.4934892508808659 -98,Cameron Bennett,0.0898876404494382,0.5089788762361911 -99,Danielle Kelly,0.06741573033707865,0.5389708761000187 diff --git a/data/raw/f_675_data_simon/test_single.csv b/data/raw/f_675_data_simon/test_single.csv deleted file mode 100644 index fefcf9f5..00000000 --- a/data/raw/f_675_data_simon/test_single.csv +++ /dev/null @@ -1,2 +0,0 @@ -Name,Age,Salary -Olivia Anderson,0.10465116279069767,0.8222975321515467 \ No newline at end of file diff --git a/data/raw/f_675_simon.py b/data/raw/f_675_simon.py deleted file mode 100644 index d563cd4e..00000000 --- a/data/raw/f_675_simon.py +++ /dev/null @@ -1,153 +0,0 @@ -import pandas as pd -from sklearn.preprocessing import MinMaxScaler - -def f_675(file_name: str) -> pd.DataFrame: - """Normalize data in a csv file using MinMaxScaler from sklearn. - Only numeric columns are normalized. Columns with other dtypes are left as - they are. - - Parameters: - file_name (str): The name of the csv file. - - Returns: - DataFrame: A pandas DataFrame with normalized data. - - Raises: - ValueError: If input does not have numeric columns. - - Requirements: - - pandas - - sklearn.preprocessing.MinMaxScaler - - Example: - >>> normalized_data = f_675("sample.csv") - >>> print(normalized_data.head()) - - Name Age Salary - 0 Alex Anderson 0.304651 0.122298 - 1 Mr. Leslie Casey 0.28140 0.598905 - 2 Anthony George 0.996744 0.216552 - 3 Brian Washington 0.126279 0.459948 - 4 Elias Lawrence 0.337239 0.124185 - - >>> normalized_data = f_675("test.csv") - >>> print(normalized_data.head()) - - Fruit Weight Amount - 0 Aplple 1 0.5 - 1 Mr. 
Leslie Casey 0.32140 0.998905 - 2 Anthony George 0.8998888 0.123784 - 3 Brian Washington 0.121222 0.445321 - 4 Elias Lawrence 0.345223 0 - - """ - df = pd.read_csv(file_name) - if df.select_dtypes(include='number').empty: - raise ValueError("Input must at least have one numeric column.") - - scaler = MinMaxScaler() - numeric_columns = df.select_dtypes(include='number').columns - df[numeric_columns] = scaler.fit_transform(df[numeric_columns]) - - return df - -import unittest -import pandas as pd -import os - - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - - -class TestCases(unittest.TestCase): - - def test_case_0(self): - # input without numeric columns\ - csv_path = os.path.join("f_675_data_simon", "test_non_numeric.csv") - self.assertRaises(Exception, f_675, csv_path) - csv_path = os.path.join("f_675_data_simon", "test_empty.csv") - self.assertRaises(Exception, f_675, csv_path) - - def test_single_row(self): - # input only one row - csv_path = os.path.join("f_675_data_simon", "test_single.csv") - df = f_675(csv_path) - self.assertIsInstance(df, pd.DataFrame) - self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) - self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) - - expected = pd.DataFrame({ - 'Name': ['Olivia Anderson'], - 'Age': [0.0], - 'Salary': [0.0] - }) - - pd.testing.assert_frame_equal(df, expected) - - def test_case_1(self): - csv_path = os.path.join("f_675_data_simon", "test_data_0.csv") - df = f_675(csv_path) - self.assertIsInstance(df, pd.DataFrame) - self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) - self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) - - expected_path = os.path.join("f_675_data_simon", "test_res_0.csv") - expected = pd.read_csv(expected_path, index_col=0) - - pd.testing.assert_frame_equal(df, expected) - - def test_case_2(self): - csv_path = os.path.join("f_675_data_simon", "test_data_1.csv") - df = f_675(csv_path) - self.assertIsInstance(df, pd.DataFrame) - self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) - self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) - - expected_path = os.path.join("f_675_data_simon", "test_res_1.csv") - expected = pd.read_csv(expected_path, index_col=0) - - pd.testing.assert_frame_equal(df, expected) - - def test_case_3(self): - csv_path = os.path.join("f_675_data_simon", "test_data_2.csv") - df = f_675(csv_path) - self.assertIsInstance(df, pd.DataFrame) - self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) - self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) - - expected_path = os.path.join("f_675_data_simon", "test_res_2.csv") - expected = pd.read_csv(expected_path, index_col=0) - - pd.testing.assert_frame_equal(df, expected) - - def test_case_4(self): - csv_path = os.path.join("f_675_data_simon", "test_data_3.csv") - df = f_675(csv_path) - self.assertIsInstance(df, pd.DataFrame) - self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) - self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) - - expected_path = os.path.join("f_675_data_simon", "test_res_3.csv") - expected = pd.read_csv(expected_path, index_col=0) - - pd.testing.assert_frame_equal(df, expected) - - def test_case_5(self): - csv_path = os.path.join("f_675_data_simon", "test_data_4.csv") - df = f_675(csv_path) - self.assertIsInstance(df, pd.DataFrame) - self.assertTrue((df['Age'] 
>= 0).all() and (df['Age'] <= 1).all()) - self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) - - expected_path = os.path.join("f_675_data_simon", "test_res_4.csv") - expected = pd.read_csv(expected_path, index_col=0) - - pd.testing.assert_frame_equal(df, expected) - - -if __name__ == "__main__": - run_tests() \ No newline at end of file diff --git a/data/raw/f_675_simon_chien_edit.py b/data/raw/f_675_simon_chien_edit.py new file mode 100644 index 00000000..7ff665f3 --- /dev/null +++ b/data/raw/f_675_simon_chien_edit.py @@ -0,0 +1,146 @@ +import pandas as pd +from sklearn.preprocessing import MinMaxScaler + + +def f_675(file_name: str) -> pd.DataFrame: + """Normalize data in a csv file using MinMaxScaler from sklearn. + Only numeric columns are normalized. Columns with other dtypes are left as + they are. + + Parameters: + file_name (str): The name of the csv file. + + Returns: + DataFrame: A pandas DataFrame with normalized data. + + Raises: + ValueError: If input does not have numeric columns. + + Requirements: + - pandas + - sklearn.preprocessing.MinMaxScaler + + Example: + >>> normalized_data = f_675("sample.csv") + >>> print(normalized_data.head()) + Name Age Salary + 0 Alex Anderson 0.304651 0.122298 + 1 Mr. Leslie Casey 0.28140 0.598905 + 2 Anthony George 0.996744 0.216552 + 3 Brian Washington 0.126279 0.459948 + 4 Elias Lawrence 0.337239 0.124185 + """ + df = pd.read_csv(file_name) + if df.select_dtypes(include='number').empty: + raise ValueError("Input must at least have one numeric column.") + + scaler = MinMaxScaler() + numeric_columns = df.select_dtypes(include='number').columns + df[numeric_columns] = scaler.fit_transform(df[numeric_columns]) + + return df + + +import unittest +import pandas as pd +import tempfile +import os +import shutil + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Set up a temporary directory + self.test_dir = tempfile.mkdtemp() + + def tearDown(self): + # Clean up by removing the directory + shutil.rmtree(self.test_dir) + + def create_csv(self, filename, data): + # Helper function to create a CSV file with the given data + full_path = os.path.join(self.test_dir, filename) + data.to_csv(full_path, index=False) + return full_path + + def test_non_numeric_and_empty(self): + # Test with non-numeric and empty data + non_numeric_df = pd.DataFrame({ + "Name": ["Alice", "Bob"], + "City": ["New York", "Los Angeles"] + }) + empty_df = pd.DataFrame() + + non_numeric_path = self.create_csv("non_numeric.csv", non_numeric_df) + empty_path = self.create_csv("empty.csv", empty_df) + + self.assertRaises(ValueError, f_675, non_numeric_path) + self.assertRaises(ValueError, f_675, empty_path) + + def test_single_row(self): + # Test with a single row of numeric data + single_row_df = pd.DataFrame({ + "Name": ["Olivia Anderson"], + "Age": [35], + "Salary": [58000] + }) + csv_path = self.create_csv("single_row.csv", single_row_df) + df = f_675(csv_path) + + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] == 0).all() and (df['Salary'] == 0).all()) + + def test_multiple_rows(self): + # Test multiple rows with numeric data + data_df = pd.DataFrame({ + "Name": ["Alice", "Bob", "Charlie"], + "Age": [25, 35, 45], + "Salary": [50000, 60000, 70000] + }) + csv_path = self.create_csv("multiple_rows.csv", data_df) + df = f_675(csv_path) + + self.assertIsInstance(df, 
pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) + + def test_mixed_columns(self): + # Test with a mix of numeric and non-numeric columns + mixed_df = pd.DataFrame({ + "Name": ["Alice", "Bob", "Charlie"], + "Age": [25, 35, 45], + "Salary": [50000, 60000, 70000], + "City": ["New York", "Chicago", "San Francisco"] + }) + csv_path = self.create_csv("mixed_columns.csv", mixed_df) + df = f_675(csv_path) + + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) + self.assertTrue('City' in df.columns and df['City'].equals(mixed_df['City'])) + + def test_large_dataset(self): + # Test with a large dataset to ensure scalability + large_df = pd.DataFrame({ + "Age": range(10000), # Large range of ages + "Salary": range(10000, 20000) # Large range of salaries + }) + csv_path = self.create_csv("large_dataset.csv", large_df) + df = f_675(csv_path) + + self.assertIsInstance(df, pd.DataFrame) + self.assertTrue((df['Age'] >= 0).all() and (df['Age'] <= 1).all()) + self.assertTrue((df['Salary'] >= 0).all() and (df['Salary'] <= 1).all()) + + +if __name__ == "__main__": + run_tests() diff --git a/data/raw/f_694_data_simon/test_data_1.csv b/data/raw/f_694_data_simon/test_data_1.csv deleted file mode 100644 index e3aef5d7..00000000 --- a/data/raw/f_694_data_simon/test_data_1.csv +++ /dev/null @@ -1,401 +0,0 @@ -Feature_1,Feature_2,Feature_3,Feature_4,Feature_5 -0.15416284237967237,0.7400496965154048,0.26331501518513467,0.5337393933802977,0.014574962485419674 -0.918747008099885,0.9007148541170122,0.03342142762634459,0.9569493362751168,0.13720932135607644 -0.28382835295794584,0.6060831843588289,0.9442251360530421,0.852735541109285,0.002259233518513537 -0.5212260272202929,0.5520376332645666,0.4853774136627097,0.7681341540644223,0.1607167531255701 -0.7645604503388788,0.020809797952066167,0.13521017836911686,0.11627301740168983,0.30989758449002924 -0.6714526452120027,0.4712297782500141,0.8161682980460269,0.28958678353055356,0.7331259776126706 -0.7026223552552463,0.32756947626342303,0.3346475291060558,0.9780580790165189,0.6245821117353524 -0.9503135246995253,0.7674756506204887,0.8250092532147735,0.40664030180666166,0.4513084114213143 -0.40063162736167357,0.9951381603385585,0.1775641759378893,0.9625969030278503,0.4192502702591062 -0.4240524465509987,0.4631488697273459,0.37372314882391533,0.465508100172511,0.03516826147080476 -0.08427266973184566,0.7325206981419501,0.6361999938328675,0.027907788861532556,0.3001700598239866 -0.2208525208288855,0.055019993340200135,0.5232460707782919,0.4163696572946102,0.048218749825570395 -0.5724045155296094,0.8035166506773763,0.11360171953981701,0.2771172144251962,0.6416780916854229 -0.49273730543650784,0.5065560756680948,0.4614402600659552,0.8947430739278908,0.6057776931355849 -0.6033556031675537,0.4438718561912689,0.4800467833268902,0.8884475272296929,0.2085004884601801 -0.9445814621563249,0.07347004394226553,0.5951524566360905,0.031151070851766383,0.6652574325307892 -0.6373854951409694,0.862465162297615,0.9416377123396569,0.4456270534106257,0.669957632435755 -0.9243306996319917,0.6194234667677466,0.32258573270881385,0.3194763109335812,0.2914566850066529 -0.9574174237390571,0.4059583429596427,0.9465558223628031,0.8571905588319436,0.6889276660324576 -0.0032879687523396095,0.9001034996569824,0.9198648709623234,0.0059093558684842895,0.6429261484358456 
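The rewritten test_single_row above leans on a MinMaxScaler corner case: with a single observation, a column's minimum equals its maximum, and scikit-learn substitutes a unit range instead of dividing by zero, so the whole column maps to 0.0. A minimal sketch of both the ordinary and the single-row behaviour (illustrative only, not part of the patch):

import pandas as pd
from sklearn.preprocessing import MinMaxScaler

# Ordinary case: each numeric column is rescaled to [0, 1] via (x - min) / (max - min).
frame = pd.DataFrame({"Age": [25, 35, 45], "Salary": [50000, 60000, 70000]})
print(MinMaxScaler().fit_transform(frame))   # [[0. 0.], [0.5 0.5], [1. 1.]]

# Degenerate case exercised by test_single_row: min == max, so every entry becomes 0.0.
single = pd.DataFrame({"Age": [35], "Salary": [58000]})
print(MinMaxScaler().fit_transform(single))  # [[0. 0.]]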
-0.38521698024307416,0.5957418942869064,0.6108070555044093,0.5996191883973377,0.3122839968432516 -0.06987367476308803,0.8007342342155275,0.9114340501333191,0.19467462904253707,0.21060763885288392 -0.3774407823606447,0.40284721017267344,0.8851776944327949,0.20215434337007254,0.4190148274837505 -0.6479591733392613,0.511642952793331,0.3338324855146031,0.7784734338593446,0.1354351345676108 -0.19171077225555577,0.4025891478959648,0.7868444652477247,0.2538081939276894,0.10948542133151884 -0.9306760143818863,0.1263872837745974,0.6050053903635636,0.29012193658782715,0.6265293919225703 -0.6594759538408164,0.06772616162569478,0.3554429248010227,0.3347240774767726,0.1775063672167354 -0.008003339782281182,0.010438402094721133,0.5974588046391233,0.08463852035212138,0.3156682369286655 -0.9686183289699668,0.8735825512370129,0.9127839941708243,0.16989826093188554,0.3168079006890451 -0.2417802357140627,0.4909549698249829,0.5439434095685136,0.40545438708501025,0.7424103131767936 -0.6437330157652027,0.6593433507146707,0.14922368526773322,0.7465837898057559,0.6414475956156165 -0.2982310292316368,0.3758278953194599,0.9886278651751473,0.3445949402799181,0.34344500065127515 -0.6136534543751032,0.6183927127393399,0.013353422191246023,0.5744327948270689,0.7776794520796216 -0.9498820771377502,0.7029821731000113,0.1162057334478247,0.24660147812309097,0.4189386807175727 -0.6320315467138395,0.09547025346487126,0.3903092094231321,0.32196758753072663,0.971325293927578 -0.9691823796114413,0.7434179682255139,0.5176892950354683,0.16447995611891109,0.9493815703849737 -0.9218259546838844,0.8825707003390507,0.05103860002507066,0.774940215434729,0.10589515242345926 -0.9053958666296535,0.5878864847116027,0.9880897576949188,0.42866135651617787,0.6298499789511404 -0.7762813342653192,0.25981748776761393,0.6389130692621574,0.8267999751606756,0.5213052583204294 -0.4244851388775528,0.5310030378042822,0.9459781226700603,0.9992736621442428,0.6709235888482614 -0.7895761370422827,0.9815875546936368,0.14155841215261689,0.026287019886786056,0.7464427612241473 -0.6165880740404701,0.0992046992760729,0.5219026753571561,0.5074319814786622,0.28569302592260826 -0.2484384892748085,0.18034918019694157,0.5838230669751122,0.15391860735096496,0.7117275877052686 -0.8587524054520352,0.6864413707981849,0.2651296088121916,0.39703270686735204,0.5130160252122433 -0.34420422080479374,0.5456023269980058,0.686116734830431,0.299692894993413,0.215357823464707 -0.7883087899319129,0.7758489459474068,0.041725950621869345,0.6454853695442804,0.12819310250608618 -0.5476525029423075,0.29152344665594654,0.8258451161973028,0.09735237194458535,0.8975119113585548 -0.3419891459847828,0.13208530504150873,0.4575829484147603,0.08443780806374235,0.7402265932444464 -0.3637378187811511,0.3335611622162322,0.1064246635463526,0.18614562007599367,0.5192488047646937 -0.11262227904358624,0.48392558946415953,0.9314883770043625,0.39885519442826,0.180391984533548 -0.8054920402860906,0.6607354619910755,0.6134351145389226,0.12388215492523169,0.3515415467691554 -0.47309980160972587,0.37838530062484954,0.22192366622281312,0.2268802895194798,0.14282814495268292 -0.7294251211685133,0.12301160940997591,0.3197779740723209,0.3916596206529702,0.037941219451945285 -0.1550112869052518,0.17064900621690204,0.7703122055618102,0.23919854104233962,0.7471700872339463 -0.3474941846440953,0.6612133682000852,0.6545539998323858,0.2329759428922995,0.4209747700081974 -0.6765435155499492,0.6053452862559077,0.6086805065416605,0.03948342177566533,0.9054359547749586 
-0.8890563808994844,0.17868128271276496,0.3879035282574149,0.90763459414889,0.45213155017341855 -0.4332931367915722,0.7096122693669313,0.16090213785125262,0.863751825839311,0.013862581448035205 -0.07409320943301223,0.37591574303508424,0.24936352290052022,0.5126233448800741,0.14348025691993171 -0.6416555013357216,0.5391055037985244,0.8299141757699909,0.26912490795327515,0.1710557216327111 -0.5264327644174464,0.24185540873261835,0.4562141122684744,0.11648201134691927,0.5998066280688502 -0.8366348926718696,0.9123319112936716,0.9506270720400909,0.5815293692865139,0.5146903058809136 -0.7767162119208655,0.6720973260820745,0.16378754066337053,0.4275583551925739,0.154392660663615 -0.5667015973043502,0.5091030896834741,0.4537785160900535,0.991011271964969,0.9001465523352601 -0.15142577922920764,0.607674751836506,0.4782043133828816,0.8286244464591646,0.929425450951965 -0.8287651081562799,0.02676703691902571,0.8902708182425277,0.16429847990240676,0.33397227460555823 -0.3773011594614126,0.8182809341277971,0.19805308691961931,0.17602604697827584,0.9924360529903925 -0.044698744344091046,0.3108488857896399,0.4115835711828919,0.22526325371563827,0.27060288082145334 -0.10721772670927465,0.811020665849667,0.24598008770581148,0.6338517157236564,0.36043789015285155 -0.004862687774620622,0.24722089794128133,0.8299071951997287,0.4111603222947615,0.0899145839845562 -0.9747374998493828,0.6523622640319752,0.6561555863113276,0.9275162708400105,0.9330226187676535 -0.878256588886791,0.401525312295424,0.2789496611029252,0.4809249966668775,0.7568208313633393 -0.5088825801648179,0.3752731886427574,0.5977516640696615,0.6421631853185381,0.9785298456822152 -0.6215381952322243,0.3934004315978301,0.9348383717071745,0.7318207463001706,0.430569631782998 -0.4595171267920204,0.9987904088746391,0.9635981841662068,0.006519602935150193,0.8459551706096324 -0.362481133906683,0.39728661495454576,0.2944596459776291,0.6147961666526938,0.17663557943678965 -0.20838185059400693,0.21628822450505747,0.5614268243276009,0.10308732733400872,0.31132597690349695 -0.8068815704328801,0.48603506541372243,0.9557343134313784,0.7145570085319356,0.11524533439970253 -0.4478617951302597,0.5267694798564344,0.8963368630171968,0.9202531520660547,0.053283647888419994 -0.9097006677201099,0.689073459619526,0.5744408040463054,0.918326609580169,0.40503062548248214 -0.672450600105991,0.8254855966855543,0.8160083655466775,0.560362553856769,0.5596896373689062 -0.8580126387117512,0.20553461768942993,0.7157409166048591,0.8158715426220654,0.11927652338471562 -0.8486721618941443,0.12522226809225545,0.05371435266684366,0.4706306579827624,0.938750645666193 -0.9475383118599195,0.5867730010725127,0.8131355920573413,0.25125457159025977,0.6411315655900918 -0.16578035775722122,0.25332269592996326,0.9313555007806424,0.8973316030283085,0.7429063262274186 -0.47350336340198995,0.8805503018277316,0.17069880372151647,0.5078677260245898,0.5991806472802751 -0.037243964161457144,0.8155382653887234,0.2815902695954221,0.9253010930817673,0.9330412413240283 -0.7878175045077981,0.8269103116227282,0.3030629517893897,0.15855023545904678,0.3664497095613277 -0.3932554927350794,0.9358335243236882,0.8298802986003807,0.8404899875516602,0.6933951381912136 -0.797917673036475,0.4645693916825089,0.8999122081055371,0.9703503236319627,0.4111898349808447 -0.9055898273960102,0.7125822058460684,0.6419970204997965,0.5258110940039517,0.9442141935138029 -0.36253254932099876,0.13989053513619143,0.06028430493349468,0.8081096191078069,0.5378351941841588 
-0.239302629159727,0.3421830848583508,0.6576556726273131,0.7486117805901735,0.08535958832588919 -0.6222308511260837,0.6041091948473405,0.7951910809319962,0.22430125128133127,0.16155095329983182 -0.750567327284179,0.3878757273165241,0.1215389819342273,0.4376986167339755,0.7982304643804436 -0.3458258375656963,0.19886797643022613,0.765620003122625,0.06566735264966739,0.09558583691376066 -0.02749915471774078,0.8016881172414234,0.3534261836508841,0.20628799037128165,0.002554196479065962 -0.267863283794349,0.14237178577338228,0.8745874640480864,0.11178575248877354,0.7652287282732454 -0.5169017344827974,0.03932998422892742,0.19915747523321448,0.34295973728410245,0.3647179625069782 -0.2970711552784341,0.6420688903938824,0.09204979097197918,0.08285077272527641,0.5391198604805868 -0.30964211874462266,0.6716385405659145,0.28185478830697497,0.6793215258910709,0.8799469374533007 -0.34658845066995214,0.25715504658543065,0.7455737671760411,0.13702095767026767,0.5231519132791268 -0.9250415754003132,0.29199682145108063,0.7508544605925069,0.40645544550280466,0.8385669565781895 -0.7459509110832707,0.026795289090797647,0.8941195259921251,0.16855881828088704,0.880234115794199 -0.39565264532802946,0.05240424705203539,0.8825898062615798,0.6326534186157398,0.28979325003340206 -0.8674816242296584,0.2590806711825623,0.3293020722710178,0.5540691590889425,0.6018736416102922 -0.4704971108756322,0.7915043874967372,0.5411778375385803,0.6623366013516545,0.07865776685364256 -0.9045176214579898,0.08088101666623482,0.6897407989041364,0.617731641758984,0.2566201741000034 -0.9997419299050934,0.8592871445561695,0.4757618512928625,0.9987123114553902,0.4034172461770513 -0.43493025928568696,0.6884467174338395,0.26414166005354134,0.441125769765761,0.6718219363488396 -0.24284481191453022,0.44604563000295805,0.0962310390919735,0.9494186141113726,0.613699832135513 -0.1402185849315467,0.5561807058158273,0.07422679295804768,0.4489843650587102,0.8351526861723297 -0.6466637578587885,0.39774160208734777,0.6444616628974448,0.9274893040080311,0.22679054713975122 -0.8970351119519802,0.25500485841400045,0.6218360325683211,0.14788345533547487,0.49377663648735826 -0.9477711632499198,0.45072491416368377,0.7284366401949913,0.47766425577190996,0.05675039086349254 -0.4804062516102159,0.2641024941572446,0.9716774705910458,0.7877370055878953,0.8842466276939615 -0.5787910367368045,0.9790242837689553,0.8970150373791738,0.46198303770764815,0.5950638333464884 -0.2965033609067024,0.4194839022482725,0.04716266313652151,0.7259149027914831,0.3432161089514869 -0.43903783553670583,0.39781526578920323,0.8624483471260455,0.27761430768866957,0.6113921803707146 -0.21913211640693697,0.30641462947745757,0.734408213056018,0.5238561274396387,0.4189955043672612 -0.2635037966875571,0.06476168149784545,0.0321195255465363,0.8242516673472671,0.6869991113507257 -0.765068419761585,0.6179775381588708,0.05680785679749112,0.6768572042987306,0.15076377028518217 -0.965033653984216,0.9523384434403166,0.9633881605368979,0.4344343322977491,0.6445177100064583 -0.07855171112256676,0.8872377126157872,0.7265988860186824,0.32177203703299195,0.5072882061030659 -0.19682961162293533,0.6296094510334698,0.9890300165308736,0.27324111688328057,0.05577858732444729 -0.09453026507529116,0.4927959093491514,0.8851337820949404,0.05011614634752137,0.49818980714158034 -0.4192708667676259,0.6820975603877187,0.6200284162324146,0.6015888993614622,0.510113810576061 -0.5231408848812849,0.0958949840013037,0.46573481389262716,0.2681621966988056,0.38660310013017063 
-0.5766229803852926,0.7843920703208338,0.6929913353655328,0.2432311204510037,0.8674362892237333 -0.5492566778908761,0.16006248046577298,0.6363704559058008,0.8799700995972736,0.5869543880502379 -0.8436144740937097,0.8535860145776634,0.8609028667819985,0.08490412572479211,0.3572404339761751 -0.20914829787272537,0.9833703690322853,0.6718827427081613,0.9099108902038618,0.55605894506456 -0.26547669760040915,0.1094604701138453,0.4536690400616854,0.6120733570947645,0.6936984609944205 -0.9786024317147083,0.10526073880888143,0.42194937378304254,0.9142331465732655,0.5216587009345063 -0.6809502012105494,0.8486669652932888,0.5082004970365874,0.8259163785985607,0.21567271597617987 -0.7055809718445905,0.33251649923933013,0.8028235655058684,0.34837730338768147,0.6982089873154393 -0.8259941993699876,0.8418552731523521,0.2480926082053213,0.9954513462113844,0.00427644352758183 -0.4069325044894543,0.32998274016025897,0.9542931856243891,0.5655157155514706,0.7439607041801324 -0.36256908772532903,0.18589540943159466,0.026615067011762195,0.12975262059293458,0.3431797746464612 -0.7275260195401277,0.4626912376998261,0.7010798181739247,0.5804085116084441,0.34095177425375156 -0.4872310207824382,0.49694206249082196,0.17025886084659902,0.5081525091966265,0.9238445035268107 -0.6158551016045146,0.8304342643389905,0.9515281517715709,0.004698225902617592,0.6609263771009867 -0.384294759639799,0.9039131912217162,0.6085919140784006,0.36514931180397203,0.11796143014557947 -0.04994184320059447,0.7173687253620038,0.9488963886191596,0.4000482353208322,0.28214972522220294 -0.07547969881719652,0.7478950353271534,0.11939934408014097,0.5545086110835651,0.5406003076176303 -0.2732546476614286,0.8673397468373982,0.44136987864371013,0.45967415466665607,0.8148909518355534 -0.8059571063393413,0.4103176510477784,0.2660004154375796,0.7367040145509781,0.5292870758791325 -0.7036422853240553,0.9642220235065393,0.7593413010875437,0.18239147514721732,0.46141816275771275 -0.8745119830718312,0.2573350662910133,0.7813383081754921,0.5734406410619634,0.2822325197365869 -0.7198777821352647,0.08775730864309184,0.2560757064367235,0.2299220758639836,0.22993770086762388 -0.2738237448956279,0.27768570000559134,0.9638567442795015,0.35867364626416165,0.33371049533757946 -0.7341455948471967,0.4621249879092766,0.4933538910702978,0.8298551101947249,0.1355070170053998 -0.5189437209436469,0.0389839930794037,0.2535932360729716,0.23961022845000213,0.3682542307382227 -0.33020553887463133,0.3149736415377087,0.9977122962801849,0.5765373544939678,0.37445100155410105 -0.5237761024311335,0.9239633263644682,0.7838145680029014,0.4705902885479949,0.14863797446864524 -0.07304610297035863,0.002315235361871726,0.9054122794366988,0.1675217351493673,0.7434867721975275 -0.9785571333411374,0.5832175521653102,0.853843573868573,0.8216151011809336,0.9416476758813731 -0.7378471507613225,0.4099191196460761,0.30798243022698324,0.5275078388616355,0.32374094713400736 -0.9447994010617905,0.0871353037964866,0.19672529954664575,0.40324818062756373,0.5495949839791854 -0.5845723604001796,0.8410695153430464,0.2998495178584206,0.8048722490912587,0.3073281614984268 -0.29552811284976366,0.8428469047877142,0.13944329208841832,0.9353212444051652,0.6867191238671716 -0.562172980084089,0.29699438185246285,0.6868797910964971,0.8440912397261736,0.6924892928309406 -0.8275565062190012,0.5838464572590725,0.373326309828353,0.4666727929336698,0.3971065310519698 -0.6675069853408093,0.9728529176144518,0.8457025218448182,0.20226214784716035,0.4991332333524665 
-0.9096058810702484,0.9993120080310351,0.9545225272090656,0.08463683516510512,0.2740255421610679 -0.7254248280305533,0.007173827525889465,0.7150101571661654,0.031201187273819264,0.16726175156819567 -0.7120356254935181,0.9425695310978616,0.03343159362212955,0.05576377596541049,0.5737543396649513 -0.5576363594647847,0.8886996999324004,0.8732841659128053,0.730764438657685,0.6789430265365074 -0.8146593942805803,0.8475301467542916,0.7547242729554852,0.2048232850677535,0.5687044832496833 -0.36016432767567963,0.6222703953488107,0.980178098408297,0.7991859855194042,0.9275831295036155 -0.3741815427221177,0.5850570891395831,0.13180068374738108,0.0004446183557327288,0.7074925753101013 -0.19297483160350115,0.9036432231820007,0.3195967411046676,0.14720002850319513,0.9427218601483345 -0.19092276413983789,0.8963735239187084,0.14452234716868806,0.43837289578621574,0.861878366739155 -0.8539789386945219,0.7104617895384827,0.9155617884582572,0.7951437425689705,0.8058181513065145 -0.44339640085287635,0.6259003260912837,0.9322528646644016,0.16343504101217943,0.7490742027506874 -0.4054006493099229,0.5718274216544864,0.08575909794857417,0.6199660193324404,0.8204928776039269 -0.47506650538572626,0.9661716573495945,0.5766267401682874,0.3206552090005459,0.6794122301877372 -0.8436167592491796,0.7553302902041658,0.10118468852624707,0.3287390949986758,0.8842263533349015 -0.8820345285062106,0.06253940041106021,0.1737208213397642,0.3543941724768839,0.19728578005536201 -0.6213418954935643,0.8357832716894009,0.03236666965986834,0.8625914701052246,0.06421602135789894 -0.7799384030417916,0.4067682618985582,0.4452345222510248,0.3557289316861104,0.3976545941429347 -0.8809143265158621,0.3282794887225893,0.6765347983659535,0.7286877722269026,0.32047260471838557 -0.5204365916304459,0.10989366844084014,0.7985809678093722,0.44522149738188443,0.5342919616655044 -0.1327724282504884,0.7191495804956752,0.6493026081922677,0.9028290645318279,0.5886718753509224 -0.983610105517651,0.9693229361866718,0.6598382865411838,0.45292597267021006,0.8193736686878103 -0.7021580183964725,0.5384490666417429,0.6873383603433448,0.9057939128470562,0.5726016947250073 -0.3750718720848163,0.6425477259390162,0.5962535184998441,0.29156923328534934,0.9010299590077822 -0.20665366596660162,0.49222197026011527,0.6599013045023,0.8200117065609402,0.4986108133056344 -0.6258502032502326,0.09057709154999416,0.038978506798799994,0.22773308390671176,0.9277149927215927 -0.2591616974161479,0.15906610667129406,0.5879738816855449,0.14523816758727182,0.2402062924887769 -0.6753188075667691,0.014234745281166306,0.5944050780413204,0.20816369960162096,0.2219028686207084 -0.3458641340459163,0.15591441166983777,0.9397187839242138,0.42726966109534903,0.26949585617296723 -0.5957540610745337,0.17769800353011744,0.3170059246701302,0.8294713379770464,0.28642710837222807 -0.15214236625670852,0.20768805213415242,0.7543596528167207,0.17563744290140382,0.7604544487008349 -0.5324766190602487,0.4511926166471313,0.058272995979225195,0.8760482070358028,0.38409365572313203 -0.8982337010406493,0.8865163563135802,0.8079659026022677,0.8740019435576946,0.9705975226585791 -0.45264543021202386,0.7374960943148494,0.18897022601700486,0.8686481472624915,0.12442779942320825 -0.8585798008049597,0.024059081193401388,0.8558925878209671,0.7750242202716786,0.1671311610237317 -0.19173310782940844,0.8159434662940922,0.4883803735203951,0.5100456537051462,0.37763597484131006 -0.6649587633330757,0.01041178577773183,0.8682682238748023,0.7626604619516371,0.21399387111930712 
-0.6919149200178973,0.6328783008917277,0.581064967091261,0.4488925868686935,0.6587392249709462 -0.15161607955216105,0.5339480355055902,0.7975157189598092,0.8942761787866269,0.514630376499041 -0.12044340930189323,0.5478559858680612,0.6612044059783045,0.65971417353928,0.7377905002724437 -0.8186539004177295,0.9770660305209766,0.9072944906316612,0.094417320293398,0.20021831725609363 -0.9205258641339387,0.018877589962135177,0.6498659852688285,0.6660138963691559,0.6075718119288291 -0.7529279419666134,0.5867701208518924,0.33374942828614773,0.7390739745866809,0.6899687602586984 -0.05773132850926921,0.256303369676637,0.0505038219809506,0.2409158159120146,0.40459580387537686 -0.6364144201516725,0.5195104790643214,0.45429992065455105,0.06255538857331233,0.12094819524620382 -0.5937737658223954,0.8942219032178192,0.6280245301759507,0.42961459665007073,0.6313601391965382 -0.4889202762282425,0.9560837107918317,0.2630090957819037,0.7852334357641005,0.6130397373173998 -0.5261812292947291,0.410586987991322,0.03361718213342424,0.2630248493077433,0.22019206365589783 -0.35849884046809377,0.5543474221203846,0.49581880855614324,0.28168558858338477,0.43566024646585133 -0.34765948959215187,0.7394633507972804,0.7262073224406673,0.5004272490396358,0.6689366492037471 -0.7479319921138762,0.007257459172577141,0.07447290335622025,0.5089670158768163,0.840428426064464 -0.48495069142735303,0.14702289867824514,0.8259837093236131,0.07826414324174491,0.5835626676530996 -0.026637292517562305,0.9611012154484645,0.4803326242025263,0.3399315388827846,0.8170185163283401 -0.3063393612402848,0.9982633030948801,0.9001015870343231,0.24152589345662656,0.42594672939960254 -0.8163650797016173,0.5678923368304899,0.3714805938265334,0.41328383524987167,0.19136924626375906 -0.8526415418621073,0.26095724864638026,0.5847641291786188,0.3927328000436314,0.8105259467151964 -0.41572049880195905,0.5488068917646205,0.49651490960536493,0.19281326807253518,0.8770839664895851 -0.3933006635869696,0.47331349389170885,0.4192353347228629,0.6093217129920303,0.8610026198650798 -0.6235252560852899,0.5334031566191612,0.912037880342899,0.8067086908193536,0.2445596128014501 -0.22030673042242654,0.5027008274355805,0.2054748050298183,0.04362258114963513,0.7241432698719643 -0.22511051918104485,0.41858696299231424,0.5345697171716814,0.3056136922156619,0.9356901692502219 -0.5608037723188164,0.8771448987913365,0.8733963137026634,0.44235486817069436,0.6639508265634635 -0.6565050380881582,0.943981700348577,0.5822949552492498,0.24188449029034564,0.948492439677151 -0.8867008390597515,0.07302655573340044,0.500984371775215,0.6972472218986945,0.7821042351483646 -0.19572143047731194,0.7490435164487307,0.3121361851188945,0.38776518817220484,0.07847536386905185 -0.35627760633772887,0.7390275450381808,0.9335881022481216,0.3527032132663309,0.21672568467524644 -0.6101077347205939,0.0036526736301873974,0.9068677633896842,0.4899664083487518,0.8701357193637123 -0.8860147659582526,0.6265513385375802,0.6436788285913693,0.8521208519137449,0.6261314986875989 -0.8466787480675659,0.9921596125052959,0.35376060055570435,0.3248540768738426,0.3299150773225855 -0.244487575532509,0.27642409900618103,0.3645843819340093,0.4828494134909461,0.922895050028712 -0.5710471795087152,0.5254167625904782,0.9257335713813187,0.5964371867906351,0.4515024199414117 -0.27778681300890584,0.1501998294036846,0.21784779343027472,0.5135685432856023,0.5120503889452015 -0.18302454277155755,0.38716092881742437,0.8324263845458371,0.835332323451199,0.7766612658530485 
-0.11909601272324088,0.6300849750214,0.8769002773422048,0.9046220910454973,0.43951416079900985 -0.8626310878466855,0.28500042803679493,0.30305211068286564,0.5518987475980922,0.875699601975807 -0.7871453903394735,0.323669018707884,0.5288314752116172,0.9225025618403538,0.47587081096101125 -0.15022627686290824,0.1360336855845371,0.6313996415814213,0.4976295576283817,0.5912285256641532 -0.3284515104903939,0.3870161291792894,0.4734763663326971,0.8604029124886622,0.05720363896266689 -0.5085777903984293,0.6951465712693383,0.8739045971283568,0.36581898591693474,0.16810668734001177 -0.7564981803059989,0.9378717597497568,0.6037518915578182,0.20885585735974954,0.9937145499449112 -0.7793235038365439,0.8983728854204939,0.49566493615139273,0.8129603807981682,0.7198719782227972 -0.42752037615313865,0.31887932044027667,0.33530551348096016,0.3700772274583174,0.7467166575101637 -0.30139536815115375,0.7082403234739959,0.6139758947906373,0.796217828090473,0.8089079037162062 -0.17128571631200984,0.15652697928067238,0.1260578309735687,0.34071302878724474,0.5289980904940338 -0.18225652276695026,0.05512101362369404,0.8082665290171991,0.3527579864613286,0.6904828484295669 -0.6664703044720457,0.29961511116392703,0.2609874430423238,0.47898529771701737,0.012341515678240156 -0.3059755948081405,0.3873762064709627,0.4899642234037891,0.091658817481443,0.7318900704632391 -0.8943890762660259,0.9917835465517616,0.5390040971026095,0.17720237737203215,0.7686581739570854 -0.40303516756383095,0.7352621192902989,0.15464947862427925,0.6408817054001226,0.15073002005351532 -0.7773945264964609,0.7358647000892965,0.2925375471829782,0.6695408500203899,0.8558078808069342 -0.06527775201625041,0.5653352689650459,0.5711877411946752,0.9871279181804551,0.44809306333448196 -0.13409266981846168,0.8984371174991395,0.33334782177084055,0.6005307541455309,0.9355867139832197 -0.3254499297369676,0.7997702728732164,0.812863504254356,0.9189586984502873,0.1498256030632512 -0.6825793929068927,0.2853683498131806,0.6665245032133137,0.8377146372327806,0.3589987924080491 -0.007086750918808726,0.20146603947149622,0.32928464004203895,0.9353041480688623,0.3013808720730242 -0.36570472641031915,0.18669693802541443,0.7821347494474481,0.8533978178978673,0.37317866366381847 -0.44357323647336155,0.29810929645080486,0.4634922388068812,0.2738938866143179,0.04299148520608631 -0.5864528443030136,0.2825537901799383,0.1751789868240673,0.5590773607711157,0.3336097068307926 -0.0440085740878875,0.8472412772055863,0.6182313348692414,0.3829493020888839,0.5177001696081633 -0.12927753389958674,0.7915653619191014,0.2769923821921937,0.06051281733614777,0.10699838859926825 -0.2585928931503997,0.01291435443027722,0.7980540515617356,0.08825008946030666,0.1510059219632578 -0.7208895403405823,0.7080324128298718,0.6335561014773644,0.7087478960945752,0.6847810232396448 -0.8641253690934467,0.664411288561791,0.304418062734521,0.31520366123197063,0.05067589796756167 -0.41324455903353163,0.2548238121775781,0.49468892500408246,0.5442521334710371,0.024860795351863807 -0.3106556055406543,0.7473918833912091,0.6709877724194117,0.18883160970937296,0.8083650571528715 -0.9555668941738619,0.3833644347955859,0.1945832429672426,0.4118644184108806,0.10018361755010585 -0.9946094653806148,0.6699627920311184,0.3901650325944801,0.7656637690453131,0.7711644479198988 -0.6184341031437617,0.865884950627049,0.597455453404241,0.4604708574613291,0.8513476929437568 -0.18133702923306771,0.8084269308863945,0.5523385536902609,0.7880562623943064,0.06070709117461082 
-0.20242173401117036,0.4741378650058369,0.372859717353202,0.3004565315156863,0.7073928938022385 -0.6397294886137421,0.042113628206679854,0.34521275426294773,0.6079684234452359,0.9114828995132029 -0.1942612488253329,0.3090173081029566,0.8521050783963491,0.5659295760418022,0.6714541387159048 -0.7386977953319873,0.7954930505399861,0.3948125651359785,0.5237593914740467,0.31875498485040465 -0.02591639395740397,0.6868772795206854,0.24238098993691282,0.7470198638366462,0.6314592696197532 -0.39330116943463145,0.20235062001101545,0.10924952073215677,0.18965125015574857,0.041374185486900306 -0.5977771762563737,0.7295093614146536,0.9116355811996806,0.9600137947948346,0.5938383592817765 -0.14068471874119215,0.8528195496862404,0.5129189627290276,0.39062412270080216,0.6073808653482324 -0.7090444860622662,0.42381934578761493,0.9462640876849263,0.057869908977870144,0.7939616658507797 -0.8993267726601832,0.8082966819986275,0.21332377485510756,0.15085935451996613,0.9715894291541076 -0.6414786733029935,0.05757812222336467,0.04683980669311172,0.5868515937770623,0.35724676399177413 -0.37435045044306225,0.22854964898115038,0.18825506321767138,0.5703114947692098,0.9679478395319213 -0.11007607149534104,0.5640577499020142,0.29214004786225856,0.958462200313717,0.9644099350030974 -0.9847968141127248,0.15945007613925033,0.25120260597112765,0.32704615343635635,0.346577778304731 -0.5108963115531392,0.705968633032463,0.5501803106188405,0.8875780006923177,0.7048073399223784 -0.3546417194956136,0.4158004557665195,0.9636947651384472,0.9644909380501403,0.8017059822206251 -0.49922925274809027,0.14568220966509415,0.7600245696709215,0.6804344313296321,0.11018795965469497 -0.7934155428357226,0.8250294419461488,0.17421091560815083,0.9752001207472589,0.7753026396368685 -0.4889573817840701,0.09782476388237371,0.5496276297913293,0.4228842799235406,0.3705935766674756 -0.8887452359833111,0.7701066632360896,0.18250416483187748,0.09044511936694499,0.34392492857672874 -0.8005171042056015,0.05964443489597371,0.20817975038316772,0.17220842411402437,0.6627943040834179 -0.2634298654909757,0.4965985401372146,0.310517660526491,0.29630972749349127,0.468781521067677 -0.13696603350329828,0.9217868705477634,0.3476968843970979,0.5298182936478621,0.2476548310210025 -0.2903559893843479,0.16041240707911775,0.22455650633470414,0.41057652648437015,0.8974455354736222 -0.26829429346121436,0.0697025407605949,0.08134979643844331,0.5766129958520101,0.5623134712952204 -0.9591685981460986,0.31299451440431814,0.9508556466638547,0.14388680636789608,0.9498584968500938 -0.19603642252196285,0.10674923760102861,0.9041136273084708,0.8025593647877816,0.5595225691562173 -0.07363294235522877,0.2711718068340584,0.809437599477522,0.14948792130431598,0.9370675999571696 -0.2274451880979922,0.01930236624413806,0.002327750602396983,0.8701374913333189,0.9502652624838932 -0.4166896505181499,0.1202261122069107,0.23057338673076,0.9262964904859582,0.7552948749451336 -0.512613498926788,0.7975385055765047,0.11551437385170493,0.0453776299978379,0.44056380636666825 -0.9117431313737648,0.7118317480833339,0.6719357337561388,0.3439263351393479,0.6398251157637396 -0.1523917633310231,0.476314772142694,0.001118872189098119,0.5156938092110201,0.3173106560935455 -0.3390048940419216,0.3987676992402708,0.0968642808833523,0.8388468784999696,0.5661612662357275 -0.8367374094473154,0.028091318651062624,0.4102813140289928,0.6463548566013528,0.8532038506727285 -0.07237398904404924,0.03665298283429308,0.6404978610075415,0.23317534546050522,0.45107930635708726 
-0.718628153230472,0.9255942797354131,0.8986934877555551,0.07302765010367396,0.49163503953372023 -0.08225369872816757,0.45733631072272685,0.48412868552287536,0.32909422489925044,0.18704035719576473 -0.4695584768487172,0.1751663042727123,0.4887640616071016,0.7340273692677151,0.9706710401482453 -0.23455694525508686,0.2248585716819752,0.428173601673401,0.16440811621722384,0.15244267389588873 -0.2921584024137558,0.7872121943297156,0.2764103585959172,0.24904797516698718,0.997110075026289 -0.8682621530287616,0.641474608657545,0.6759452331706339,0.5485967641080314,0.4900974151075108 -0.2999677343277628,0.27306709201275203,0.9860291356254376,0.8945403279130871,0.9783062714158128 -0.1781323829002427,0.7571657875657584,0.008177971394228822,0.2302169782354445,0.8274623545337872 -0.32024734046717807,0.35809250874460674,0.12161949777499192,0.20707184379367083,0.8332213463648676 -0.9334811133603967,0.9743439567945317,0.8953098575419535,0.1975056299350082,0.42696996098717677 -0.2610679976491427,0.6928876878868646,0.7387099984571154,0.5755096480987824,0.3935509949373013 -0.27087424258569237,0.8176348762720685,0.12807383598781064,0.10851138830865392,0.13049767934995793 -0.20665853899480158,0.42644865761911255,0.07596547349521798,0.7007490866511796,0.27785182264581565 -0.7653316530483343,0.34677244547972985,0.32277697697938545,0.5583536376206093,0.07647442681799621 -0.2474620703759952,0.6000704189079327,0.23783089794364998,0.5232685548782293,0.5007253526743881 -0.5586593762102094,0.07815929642289332,0.39268107965146937,0.739216551383418,0.6330967377015079 -0.4993466398672184,0.024220760828632892,0.974122020587236,0.44618734324795695,0.6278981724592724 -0.11742142928495003,0.7432287099525839,0.45685657888668674,0.9824805818798799,0.6807660392817135 -0.46494952977271364,0.16160161575531096,0.853160384059044,0.6236918563870306,0.28487615641122266 -0.8966137280560189,0.9942493095629743,0.1525149643424093,0.9028488448128814,0.5687316823299806 -0.2219867139855145,0.06273081703775074,0.7275537480006886,0.012333459186922524,0.9161737966807868 -0.10817165860258937,0.6115424461918687,0.3092652169851944,0.791426442141872,0.3071833924912811 -0.08039954081197087,0.15119607421091785,0.9849207747724451,0.9743846796801151,0.44189557550095704 -0.6821377458006891,0.6832888415190825,0.05638258718139877,0.8253952347716951,0.4339006789213188 -0.31842931005563313,0.691481782818412,0.8840430971101603,0.9300037314426172,0.46354151395360743 -0.15785252967390762,0.7902225900616803,0.239042241751382,0.44366133147512266,0.8199137460646326 -0.34000608927647813,0.22596491685430242,0.01042840324638139,0.2970921789516755,0.6099192364439865 -0.9775687109717187,0.0707965846423314,0.31656499554626116,0.4534834814690455,0.9226278448655716 -0.8005672970428545,0.5956875644994135,0.5405748880266737,0.21728899988152017,0.13600245929717203 -0.24481305612255244,0.9982197359233125,0.2550239638844577,0.10589323680061347,0.6159951702540241 -0.5579185644887193,0.48153259377167656,0.2503198750379535,0.022674193968150735,0.5834716444016353 -0.6535604544645702,0.32204170463129045,0.20108300387343347,0.36841900017024953,0.14214075558182038 -0.956049141538781,0.5814111624510557,0.6309994232799752,0.2972460474954379,0.8983110020126315 -0.5498122935236129,0.9315071644964158,0.3778167413568866,0.5789098140665498,0.2634891038307212 -0.25526960380882324,0.23558532413629107,0.030212969802915213,0.38792080706260135,0.6791958469009284 -0.02822125330337999,0.5212813326797322,0.45461717851475014,0.3448117016531671,0.09309324213887771 
-0.10471772650725608,0.3285239032485112,0.8825418023370671,0.8835863709706466,0.511967247031543 -0.013128026550336513,0.21864648964904454,0.4066873091839456,0.18030528325841844,0.9367429833809753 -0.7580462589664934,0.31748523538591067,0.02171168567512094,0.5176333712521192,0.3435633316817548 -0.7614165649639613,0.5549239353104213,0.7293236899682891,0.12764571650904766,0.4013463341786041 -0.2565681808320195,0.26665809794686124,0.6324570909349416,0.5790362585652593,0.0025121860039620802 -0.3457675021035356,0.32562120484299284,0.8633341591423308,0.9487924821088674,0.18508071011502103 -0.0928190289681663,0.7991392764351598,0.19181554869580186,0.8940601584942798,0.32616278006303223 -0.5184990604103303,0.3759265847905594,0.1827998609500433,0.7774535784160386,0.07932652381420424 -0.5317547243492127,0.7153421660181054,0.6880043395852479,0.805671011741422,0.4915158093858232 -0.09429711686766451,0.047551990931078425,0.9218185117929207,0.28595160550367404,0.3851723284486166 -0.8420276633358456,0.8585721772771943,0.04385095552500806,0.12201863199444263,0.8676038303563111 -0.7613823882326874,0.9888976387212683,0.17251139443774977,0.2804574901390233,0.3221458547164132 -0.19805199019220965,0.14081810299856057,0.5188525076412507,0.8692410311879841,0.7449990661799628 -0.9435554535553152,0.6670573771162974,0.1299405011943099,0.10548116155482534,0.5903629061859768 -0.37665498872306646,0.5760473423850961,0.9157729583798281,0.3949707226802599,0.006074888737049311 -0.9815775310553655,0.22691454887822515,0.04156117313378749,0.6351872999961914,0.033023303806995474 -0.4885943985001263,0.337434890729018,0.13936335297484648,0.590095693574408,0.9995085043387899 -0.9630196615155237,0.02305974654253462,0.5298732477894789,0.014475494186377724,0.9829275396700557 -0.8896863304539822,0.20662770678096876,0.9093233418317429,0.26834304885433913,0.3601015364033239 -0.980743808185728,0.18903323400153227,0.02752906895135032,0.7797029572533906,0.41411467161458526 -0.19347312291270968,0.5714695445902274,0.7107592032928783,0.1866873259052121,0.38898425930532154 -0.7608792260280651,0.07779355197619586,0.5329635018186989,0.5500118023966452,0.7712220966939982 -0.8185862089452575,0.41974643287062796,0.8918808409272648,0.1351621349330291,0.5111958426341436 -0.41099493379210583,0.5379534334908467,0.19956516317134432,0.22259888969298702,0.349366128414489 -0.07672819566950406,0.3151582615267824,0.4788330442189874,0.6365659922844624,0.5631577639536857 -0.636466314627979,0.26164429351038354,0.4579478328695946,0.6893377527418549,0.8964895068551595 -0.8111423890637954,0.9185107490650698,0.5643239749040746,0.45217221781236927,0.3326424216842675 -0.9585648244148416,0.66714258340281,0.7149857969224049,0.0033938496366685555,0.7028956878143152 -0.6351418777237765,0.9500024126211991,0.5363719585305005,0.9994796312524039,0.3263553766640257 -0.5374947209998718,0.26000903044686297,0.12512476394662364,0.5128225318492218,0.5665007578988824 -0.13704464720249965,0.9355843129530472,0.9407132744656121,0.28741165198333585,0.8810353060020545 -0.1571580268819519,0.8381255705748623,0.23997477403232415,0.9701254012625736,0.18860145913546722 -0.8478319744466787,0.19627609311119565,0.9080091844495721,0.996128594794506,0.1543757003427617 -0.9731428466940805,0.3202858134656348,0.9440841456730514,0.08255087392425808,0.8899852001290709 -0.12943789855399113,0.33184263211248743,0.43689153090182753,0.21290726926112513,0.21897338152918766 -0.1434573945934069,0.8932586626084963,0.20495285313434775,0.00990181971828763,0.43202340594626876 
-0.010594400390133818,0.39329470577225845,0.6525701781013633,0.6194412186639894,0.5881148254470482 -0.9939346951735459,0.6073586199942025,0.08469062379215464,0.1239092214817904,0.5866053571899961 -0.1984250333761719,0.8267117260086471,0.37832749815446975,0.6846835500282499,0.0117114576017745 -0.13725209929802284,0.9348721833348356,0.06100854851341386,0.1604218179296547,0.6128768495070979 -0.6222851948139967,0.22178486310088708,0.8660719645456938,0.49665270257481553,0.32546173651681365 -0.5736846846897612,0.33352221347216826,0.6546727598833135,0.1541898031710438,0.34567473740150834 -0.6786641379067748,0.9538012673452081,0.9981995044739337,0.04153264565752102,0.12011832726271654 -0.8712398696766221,0.06772509152080985,0.8377791528598547,0.7911067402216427,0.04300724753878438 -0.6489867930468544,0.49392040997752407,0.9606474666927375,0.9696769044816589,0.23783062475398864 -0.17716046804590757,0.012887333841340443,0.6194773496889645,0.07792242168190888,0.051507758210805665 -0.6458735229686383,0.39940887253132373,0.46076342707823514,0.12048992262417157,0.25444993726312404 -0.1087089662115357,0.20308846281887316,0.5256123131568852,0.8159760671069739,0.21852787719323485 -0.6012484460918093,0.260940864178938,0.11561615912091983,0.17469536397329666,0.2814226852291244 -0.9855932134333806,0.7702509232918234,0.2593398810340133,0.10013213521013098,0.39058663863426324 -0.6367884917898758,0.41479645146960753,0.2350266953900222,0.7467983891930757,0.08911329804684853 -0.9344125089516331,0.08060771398709021,0.940944445526121,0.4795355481457738,0.5677519634650043 -0.6844249140721106,0.32410734754909876,0.8270345090091223,0.3932273600229045,0.7977691492038346 -0.287573676458005,0.8815138940673685,0.8012610357617619,0.21572942300974818,0.7263525359254249 -0.4189827787431575,0.05998243797722358,0.4658377330021455,0.016144622851663515,0.19848386052085154 -0.8125725338986987,0.7792542308751856,0.2772467900977089,0.5128316426841489,0.5516724187113834 diff --git a/data/raw/f_694_data_simon/test_data_2.csv b/data/raw/f_694_data_simon/test_data_2.csv deleted file mode 100644 index c3b81bff..00000000 --- a/data/raw/f_694_data_simon/test_data_2.csv +++ /dev/null @@ -1,101 +0,0 @@ -Feature_1,Feature_2,Feature_3,Feature_4,Feature_5,Feature_6,Feature_7 -0.04872488080912729,0.28910965978981684,0.7209663468312298,0.021616249915949792,0.20592276526743902,0.0507732566953768,0.30227189396461684 -0.6639102946247,0.30811439323687884,0.5835912762185987,0.06957095461260054,0.8674044839930883,0.13324051925174774,0.17812466155949835 -0.4959295498261558,0.8636996445582236,0.7589438355121143,0.9704851253491577,0.7593025527855528,0.3842500320901294,0.40871832944373254 -0.7133604278844787,0.27066977267313375,0.854102871991344,0.9131639696118056,0.7607560696646062,0.5166685377580662,0.16783576522150734 -0.2986922916088657,0.28394304503063383,0.32721257661773095,0.46014033379297603,0.5443683594576376,0.21740086975972195,0.7998683269389765 -0.7266844492965354,0.8492927133912169,0.2675218045820723,0.6148247111484346,0.6527560731939891,0.38681643413428557,0.41035417857281875 -0.8101575494926372,0.6003692653821349,0.8983403385846943,0.490032225049018,0.8283181322162818,0.8588894711682389,0.788445143641093 -0.887655437207203,0.6988607889524882,0.4543903076107496,0.3059151647888555,0.8200844049259268,0.13966465542401996,0.4012964984806886 -0.23121231494033567,0.5904577635433566,0.8862680579351458,0.3277415904125234,0.8046096520196478,0.7974528394679815,0.6656936477741948 
-0.24411249672277346,0.8193562457545637,0.46760119044811566,0.7391087269840894,0.4606464683225363,0.8640857495729742,0.7021642920041132 -0.16797507343295692,0.17924647312516861,0.8499050285500909,0.3651122468419854,0.38103513941087674,0.04120749026614079,0.6341630769147428 -0.8041783424451304,0.8973128976410044,0.19476348252875664,0.8573423604038247,0.17672956273072515,0.37352109865034655,0.38227876105705216 -0.570645022015523,0.5111367165411828,0.5290773087427948,0.5232026990109716,0.9318775575930066,0.2465759340069429,0.20830305100300806 -0.8160303900904469,0.3038669947153547,0.6753923909128707,0.9236580482441975,0.06996941969626091,0.7817694426024975,0.03510298901951303 -0.5090796626394611,0.4387916734246067,0.6905217829138505,0.779246629250854,0.047229130005625786,0.0745612011704414,0.6048445389025288 -0.7123500026732243,0.15077454817162717,0.6147635745007861,0.1508509880160348,0.4617322192499923,0.9802632788472591,0.8435191578068069 -0.6919147835529168,0.46279892782768084,0.9167736205660134,0.7325674873082761,0.002100500892840307,0.6729292499766939,0.578746040618 -0.4790055255964919,0.436119137508724,0.3633094554071753,0.284143272181518,0.7720255005303132,0.7158187356220659,0.16283091474667466 -0.7607077729941736,0.785828168637876,0.7386065968688063,0.22880153646288492,0.3302114813466831,0.27613222085524347,0.8975984079750092 -0.9251225926221389,0.6185755652290547,0.139303583663965,0.1375290754165801,0.3658679913753601,0.11546953773860269,0.5118326790353088 -0.2593561614943568,0.1104778922193611,0.1881140530715265,0.3239591662101018,0.6655391432760631,0.31195917200078416,0.5239487546148249 -0.3337684173003055,0.08833486468591223,0.08828575481211265,0.25229287508524323,0.21825894513212662,0.06268918940131918,0.7777770704364103 -0.4961321314455126,0.27425062088316965,0.8098333048674894,0.3376761198544125,0.19561039162894633,0.3419699532877356,0.6727698454726747 -0.2831144435519859,0.2656556602845208,0.881141173495491,0.5391299292549835,0.7437651349932992,0.8978085074853359,0.27396109208909103 -0.5334355201833139,0.1331934419386872,0.9618551907744723,0.8124559275737842,0.651408998286002,0.2022574505771092,0.9708228029082584 -0.03553603828633134,0.42323422628588137,0.9444142426814162,0.3192097458549269,0.7769142970787677,0.9750916000098694,0.7019345905358452 -0.3025290730659017,0.9686639047004758,0.3957131929786455,0.9513090633355276,0.1294060376887719,0.9370641705527203,0.7571080307681086 -0.04611928160405354,0.6628721448433833,0.22657122914667616,0.5194506355698844,0.510675296501964,0.10754192763516712,0.8150634251790736 -0.9624530170823145,0.5888823929995796,0.40425205066472514,0.6558769923812608,0.39574151649323897,0.5952190691541637,0.4326731561058632 -0.15286971031914565,0.8544956642474094,0.3593340703796415,0.10231052972199373,0.037249817803254426,0.45593234608796673,0.12814839176373083 -0.7037350320896826,0.06339631150714065,0.003030924966509696,0.08914943212552306,0.8058622799538673,0.7011904444312774,0.8600502014300897 -0.8016468415350945,0.22864052835331483,0.5086943502391282,0.5405698078481478,0.2937066146357874,0.3692151463743135,0.2702639872994994 -0.09331338995400074,0.7531320432767207,0.7305306979115495,0.003625425493034462,0.2396294240336183,0.2697173393632496,0.7916557444698262 -0.5074342880403877,0.4656865577567181,0.2628851483357575,0.60315164156873,0.8760745977668422,0.9932928263509083,0.026905988350189647 -0.524649445545466,0.0435690573567461,0.8436637069019206,0.6383106093992049,0.395954046904182,0.30695376068349645,0.06134432300631054 
-0.8965455494026727,0.9106839255826196,0.9509448104031459,0.9197092755656782,0.03180339498236395,0.8761434995768898,0.04220004399144828 -0.9806292250218124,0.7588485609998409,0.8946427884940492,0.751642827216142,0.9061552317532247,0.7408657015286068,0.9050769491462896 -0.02784998733220334,0.680111829304676,0.8463351990627814,0.8874834768942007,0.04180538641387077,0.4782711941015647,0.5828304606223498 -0.22233237386421167,0.4316722652299496,0.8370181357941029,0.9788176306444385,0.6687691997324811,0.918201421132698,0.1455453626890275 -0.4514123775335215,0.08764892380455869,0.18289419417475994,0.5380256954392815,0.6783932304622841,0.27146251186434067,0.6404080883047568 -0.8798250034439924,0.7535416374458571,0.5703652735244995,0.7062395556542507,0.3309677267908161,0.6163575829009478,0.692341206504478 -0.04533114322328202,0.03663894959680214,0.3443890058945188,0.8103656962079075,0.4253738324882488,0.862093006641847,0.29320362584870374 -0.30461470896878007,0.8825519257089836,0.3791235132960482,0.5071836857242087,0.7956068721554997,0.3360210785590967,0.0049676848041296795 -0.5852012705978576,0.34964726983312655,0.4538988277374828,0.29253149920793964,0.6555555535452797,0.49167565114064393,0.8167978981586694 -0.36354242914209145,0.2429424780007743,0.5314549286428287,0.767841999482552,0.935431903629896,0.26247512993075295,0.06851284368172861 -0.3835954243891473,0.7439127195221054,0.6033406896209059,0.7660054540983249,0.5144218155750783,0.33329854792449276,0.6217212985122265 -0.7502762622590075,0.6314311703797825,0.02153771428633222,0.3291749019059642,0.45503567478884743,0.33979277877985636,0.15359864784749933 -0.6756287768404942,0.015805383855658328,0.4825399829968129,0.030914548326151814,0.30607242741592144,0.9617913427430909,0.934241558650455 -0.4206731444685674,0.12159806492552605,0.949021207903565,0.9463700507760268,0.3677795940479821,0.05313955189247932,0.12403999137177568 -0.8973759296617655,0.0826006013576196,0.4252181271132134,0.27970332756374894,0.2253860796075613,0.09116197368504053,0.001041268406025253 -0.16766191769354954,0.6051188943104496,0.3293731006262075,0.7031923175152225,0.4547836491398508,0.7172598366201107,0.19199141985442447 -0.22678268134127544,0.9441154001281035,0.9115270250821804,0.5725248852403532,0.9789210070183947,0.8659803414529417,0.8762614678395729 -0.5271668652417938,0.11654549760863497,0.6279946670452818,0.9636444396224736,0.4402720731902334,0.528177442504644,0.10351079433945365 -0.33096637528167694,0.14179651022122253,0.9055778471511701,0.9953161919078285,0.893906313094851,0.6970787848973985,0.5412861970868947 -0.37191883215271204,0.2932866656556594,0.7545060399202241,0.03466574859325855,0.8807050526394171,0.3795908458386269,0.29966753369942234 -0.7705574741661891,0.4615883472629946,0.918071526530814,0.5287113279667562,0.8853516207885094,0.5174961875908906,0.9632753429686419 -0.2574148450642436,0.27234139995879203,0.9930365381809627,0.8415902997855221,0.15948403291891267,0.7372697834960188,0.41831510689322937 -0.12153003643249449,0.7374661572889655,0.991410908006118,0.8444962240835847,0.7496125872748132,0.4065235404340829,0.06925755859446903 -0.1875627141134386,0.7754258217128089,0.5652326866875919,0.887086281321339,0.9725406863573596,0.9183449099250861,0.6903394402951439 -0.20310978211056419,0.7580290143153408,0.45364340053701413,0.7749363592155285,0.4731223732968437,0.5312042815990242,0.6180601463673413 -0.2640318645126798,0.8847173587162935,0.9095768612627534,0.5738300477807412,0.5620370511327754,0.5995475376529597,0.7407614575009118 
-0.36007083253399985,0.140913906568903,0.10501362952375426,0.6774049470784744,0.43702201422107423,0.6256514652659259,0.7878736009217178 -0.6551778191435359,0.22531560787015104,0.8684861137714368,0.9016399931389795,0.2032834352698012,0.235844119189754,0.23731621510740064 -0.48449692544471556,0.9684557934111018,0.6101891710550239,0.290762477938203,0.8117234558853111,0.7814815335490838,0.399238847967476 -0.3654805224611162,0.6001163889944028,0.8136466070244321,0.6747982899218711,0.0902467050301855,0.055738070564556375,0.7837591925958689 -0.3003900708073355,0.10247720856178577,0.24054595623502584,0.7943364124207012,0.9948124987621443,0.9181239605074907,0.08805908543688179 -0.6617578413264622,0.3529444465226492,0.4754019560775814,0.4925083057574364,0.383077645379008,0.28662824589665814,0.6314974818864937 -0.36303131858094495,0.3341075161765812,0.20340995788453708,0.9628134084398605,0.4303678378527587,0.4164509804023717,0.433561204381847 -0.6206952610616253,0.17198425146854635,0.09751378617484696,0.8190617468774339,0.8263267572758102,0.1875040547330149,0.7328533462577185 -0.2525073345182196,0.3521278420549254,0.9736209921398273,0.7391507163428955,0.9061583133699301,0.23379522163576683,0.5270254536213593 -0.19172397593022583,0.8789402159620836,0.04771821308893087,0.8357798972666747,0.7176430378282413,0.9221968965843574,0.8196201578123152 -0.17653598915934632,0.58772800074664,0.7455189113176747,0.9754125259449786,0.6447651119070914,0.31988668698565914,0.7903426179436518 -0.017587654975008582,0.5518685933394677,0.6213710335224116,0.2750618699995472,0.3879328484182477,0.8767841398002537,0.9859940469461522 -0.13953886490507528,0.9410867055929047,0.4153128283017069,0.4955604985920833,0.7221056950502384,0.5655953559972112,0.5915072041684799 -0.4924238012534261,0.27791970892988394,0.3433547445519294,0.9314847154443696,0.6238547147393059,0.8320895595365102,0.3674661568588342 -0.9364036594466274,0.5523805878663199,0.27514141745037846,0.7038930284112624,0.25137576062642275,0.5595317394739378,0.505334458061326 -0.05402324792594759,0.7902846240120283,0.21343018117123358,0.05441154212653576,0.2836446585906759,0.5502803543143819,0.529365786177333 -0.6219877815934993,0.14260901227395717,0.987666419949438,0.6548975234088976,0.6296174889705598,0.11765498857263645,0.03559430523271734 -0.9849473268032108,0.1984635544185981,0.25739331345696137,0.27709233244247566,0.254056716257395,0.708064145046113,0.7264029019949638 -0.9748244774769972,0.08604581550486912,0.7886496016778013,0.430404009468488,0.3074499077900952,0.9285503383144637,0.38988160073023925 -0.14719829725769795,0.26308008831795837,0.9048174344064163,0.8603499117991138,0.6512243122300982,0.9503803347924079,0.2529129126739643 -0.6586288049968448,0.9498576587867826,0.24532588906180242,0.14726228350447512,0.9735793371305298,0.9784909975408637,0.23253438203611443 -0.4397540396862847,0.6104230700967248,0.692015513987713,0.7222237617594981,0.47132015699154195,0.06458336274128995,0.9835964600447984 -0.6324713018500787,0.8410236871622708,0.25594586392516305,0.515095007772938,0.8928258700849152,0.10020451641064143,0.16225783716413056 -0.23648430173641222,0.0112282212394752,0.08961658401859895,0.6678947761960786,0.9736157018629856,0.7255564160718456,0.8529216508416602 -0.01198508024544842,0.7584656350050173,0.7796192807461572,0.10525620907394351,0.42831195618564377,0.3062642161668394,0.7424063292222233 -0.15100641181890329,0.26959586457605034,0.5097182070774624,0.9860403350289115,0.39160112407760495,0.19428926049203166,0.8204456486288891 
-0.3170412801799878,0.5323724553774947,0.5447047373853733,0.5781460771130742,0.585060263090799,0.22496839163117877,0.1154285286807446 -0.33634015538884465,0.9745076388160279,0.6960494688641375,0.30912731111076064,0.20232696469762712,0.012536806558180902,0.11953938459173552 -0.6589411557838674,0.5490755874933001,0.9519476716183366,0.45816720877324313,0.8756218346831207,0.5935946394065328,0.04081690221291068 -0.06891544202449995,0.21485540935898884,0.8953435819275475,0.5030275387347001,0.09188780950967668,0.24750757187884886,0.14943321171042234 -0.5672264087917339,0.856224052220131,0.3008148926773202,0.7434884780595516,0.3105157262702778,0.7118965582477118,0.9990934085425084 -0.34110093826765886,0.06970274038822855,0.16492483850554762,0.08207816190011219,0.618270766784858,0.9462970626609783,0.22023254047116358 -0.03529151903685368,0.3533372878927177,0.3865391981447084,0.11856378016697311,0.5912848922644927,0.020093550406546035,0.9597964105660389 -0.8952214019168141,0.2948538848266782,0.07540789154292138,0.33146087225175047,0.5360584299976487,0.13801532961213825,0.017709785733321604 -0.19549432923783872,0.46040282489148754,0.5975610010611209,0.42415360822222203,0.22544690839148296,0.9813517684962616,0.67926647093073 -0.928533402114991,0.9939325464303318,0.4196886318426102,0.8331719421451418,0.24090675022344932,0.07548936435370213,0.10716923170878201 -0.5669122023942194,0.49043412177733237,0.16349181840555382,0.4292242896941414,0.5786693039996336,0.15192502789508233,0.16624430931571166 -0.6348889962383536,0.045590690481139395,0.7925849642804497,0.25789439090983923,0.20581638542644043,0.010879614720925357,0.23356663767419417 -0.0031338251986795518,0.27694178381049006,0.3733819830368156,0.9298457626037006,0.3522717736079767,0.03672128154318577,0.9945348337593571 diff --git a/data/raw/f_694_data_simon/test_data_3.csv b/data/raw/f_694_data_simon/test_data_3.csv deleted file mode 100644 index 42af6122..00000000 --- a/data/raw/f_694_data_simon/test_data_3.csv +++ /dev/null @@ -1 +0,0 @@ -Feature_1,Feature_2,Feature_3 diff --git a/data/raw/f_694_data_simon/test_data_4.csv b/data/raw/f_694_data_simon/test_data_4.csv deleted file mode 100644 index 1dac6def..00000000 --- a/data/raw/f_694_data_simon/test_data_4.csv +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/data/raw/f_694_simon.py b/data/raw/f_694_simon.py deleted file mode 100644 index 8146846b..00000000 --- a/data/raw/f_694_simon.py +++ /dev/null @@ -1,111 +0,0 @@ -import pandas as pd -import numpy as np - - -def f_694(file_path, num_rows, data_dimensions=5, random_seed=None): - """ - Creates a CSV file on a given file path with random numeric data. - The number of rows in the CSV file is determined by the 'num_rows' parameter, - and the number of columns (features) is determined by the 'data_dimensions' parameter. - Columns are named following the convention: 'Feature_x', where x is the number of the - feature column starting at 1. - - Parameters: - file_path (str): The file path where the CSV file should be created. - num_rows (int): The number of rows of random data to generate. - data_dimensions (int, optional): The number of columns (features) in the CSV file. Defaults to 5. - random_seed (int, optional): Seed used in rng. Defaults to None. - - Returns: - str: The file path of the generated CSV file. 
- - Requirements: - - pandas - - numpy - - Example: - >>> f_694('/tmp/data.csv', 100) - '/tmp/data.csv' - - >>> f_694('test.csv'), 5, 2, random_seed=42) - 'test.csv' - >>> pd.read_csv('test.csv) - Feature_1 Feature_2 - 0 0.154163 0.740050 - 1 0.918747 0.900715 - 2 0.283828 0.606083 - 3 0.521226 0.552038 - 4 0.764560 0.020810 - - """ - np.random.seed(random_seed) - df = pd.DataFrame(np.random.rand(num_rows, data_dimensions), - columns=[f'Feature_{i+1}' for i in range(data_dimensions)]) - - df.to_csv(file_path, index=False) - - return file_path - -import unittest -import os -import pandas as pd -import shutil - - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - - -class TestCases(unittest.TestCase): - - def setUp(self) -> None: - os.makedirs('testing_694', exist_ok=True) - - def tearDown(self) -> None: - shutil.rmtree('testing_694') - - def test_rng(self): - path1 = f_694(os.path.join('testing_694', 'test_data_1.csv'), 10, random_seed=1) - path2 = f_694(os.path.join('testing_694', 'test_data_2.csv'), 10, random_seed=1) - - df1 = pd.read_csv(path1) - df2 = pd.read_csv(path2) - pd.testing.assert_frame_equal(df1, df2) - - def test_case_1(self): - # Test with default data dimensions and 400 rows - file_path = f_694(os.path.join('testing_694', 'test_data_1.csv'), 400, random_seed=12) - self.assertTrue(os.path.exists(file_path)) - df = pd.read_csv(file_path) - expected = pd.read_csv(os.path.join('f_694_data_simon', 'test_data_1.csv')) - - pd.testing.assert_frame_equal(df, expected) - - def test_case_2(self): - # Test with custom data dimensions (7) and 100 rows - file_path = f_694(os.path.join('testing_694', 'test_data_2.csv'), 100, 7, random_seed=21) - self.assertTrue(os.path.exists(file_path)) - df = pd.read_csv(file_path) - expected = pd.read_csv(os.path.join('f_694_data_simon', 'test_data_2.csv')) - pd.testing.assert_frame_equal(df, expected) - - def test_case_3(self): - # Test with custom data dimensions (3) and 0 rows - file_path = f_694(os.path.join('testing_694', 'test_data_3.csv'), 0, 3, random_seed=42) - self.assertTrue(os.path.exists(file_path)) - df = pd.read_csv(file_path) - expected = pd.read_csv(os.path.join('f_694_data_simon', 'test_data_3.csv')) - pd.testing.assert_frame_equal(df, expected) - - def test_case_4(self): - # 0 rows - file_path = f_694(os.path.join('testing_694', 'test_data_4.csv'), 1, 0) - self.assertTrue(os.path.exists(file_path)) - csv = np.loadtxt(os.path.join('testing_694', 'test_data_4.csv')) - self.assertEqual(len(csv), 0) - -if __name__ == "__main__": - run_tests() \ No newline at end of file diff --git a/data/raw/f_694_simon_chien_edit.py b/data/raw/f_694_simon_chien_edit.py new file mode 100644 index 00000000..ffe6cd30 --- /dev/null +++ b/data/raw/f_694_simon_chien_edit.py @@ -0,0 +1,105 @@ +import pandas as pd +import numpy as np + + +def f_694(file_path, num_rows, data_dimensions=5, random_seed=None): + """ + Creates a CSV file on a given file path with random numeric data. + The number of rows in the CSV file is determined by the 'num_rows' parameter, + and the number of columns (features) is determined by the 'data_dimensions' parameter. + Columns are named following the convention: 'Feature_x', where x is the number of the + feature column starting at 1. + + Parameters: + file_path (str): The file path where the CSV file should be created. + num_rows (int): The number of rows of random data to generate. 
+    data_dimensions (int, optional): The number of columns (features) in the CSV file. Defaults to 5.
+    random_seed (int, optional): Seed for the random number generator. Defaults to None.
+
+    Returns:
+    str: The file path of the generated CSV file.
+
+    Requirements:
+    - pandas
+    - numpy
+
+    Example:
+    >>> f_694('/tmp/data.csv', 100)
+    '/tmp/data.csv'
+    """
+    np.random.seed(random_seed)
+    df = pd.DataFrame(np.random.rand(num_rows, data_dimensions),
+                      columns=[f'Feature_{i + 1}' for i in range(data_dimensions)])
+
+    df.to_csv(file_path, index=False)
+
+    return file_path
+
+
+import unittest
+import os
+import pandas as pd
+import shutil
+import tempfile
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+class TestCases(unittest.TestCase):
+
+    def setUp(self):
+        # Create a temporary directory for each test case
+        self.test_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        # Remove the temporary directory after each test
+        shutil.rmtree(self.test_dir)
+
+    def test_basic_functionality(self):
+        # Test with default parameters
+        file_path = f_694(os.path.join(self.test_dir, 'data.csv'), 100)
+        self.assertTrue(os.path.exists(file_path))
+        df = pd.read_csv(file_path)
+        self.assertEqual(len(df), 100)
+        self.assertEqual(len(df.columns), 5)
+
+    def test_custom_dimensions(self):
+        # Test with custom dimensions
+        file_path = f_694(os.path.join(self.test_dir, 'data_custom.csv'), 50, 7)
+        self.assertTrue(os.path.exists(file_path))
+        df = pd.read_csv(file_path)
+        self.assertEqual(len(df), 50)
+        self.assertEqual(len(df.columns), 7)
+
+    def test_empty_file(self):
+        # Test generating a file with zero data rows (header only)
+        file_path = f_694(os.path.join(self.test_dir, 'empty.csv'), 0, 5)
+        self.assertTrue(os.path.exists(file_path))
+        df = pd.read_csv(file_path)
+        self.assertEqual(len(df), 0)
+
+    def test_random_seed(self):
+        # Test reproducibility with a random seed; write to two distinct
+        # files so the first output is not overwritten before comparison
+        file_path1 = f_694(os.path.join(self.test_dir, 'data_seed1.csv'), 20, 5, 42)
+        file_path2 = f_694(os.path.join(self.test_dir, 'data_seed2.csv'), 20, 5, 42)
+        df1 = pd.read_csv(file_path1)
+        df2 = pd.read_csv(file_path2)
+        pd.testing.assert_frame_equal(df1, df2)
+
+    def test_no_columns(self):
+        # Test with zero columns
+        file_path = f_694(os.path.join(self.test_dir, 'no_columns.csv'), 10, 0)
+        self.assertTrue(os.path.exists(file_path))
+        with open(file_path, 'r') as file:
+            data = file.read()
+        # With zero columns the file holds no data: an empty header line
+        # plus one blank line per row, so every field strips to ''
+        self.assertTrue(data == '' or all(x.strip() == '' for x in data.split(',')))
+
+
+if __name__ == "__main__":
+    run_tests()
diff --git a/data/raw/f_723_data_simon/complex_test_data.csv b/data/raw/f_723_data_simon/complex_test_data.csv
deleted file mode 100644
index 98edf168..00000000
--- a/data/raw/f_723_data_simon/complex_test_data.csv
+++ /dev/null
@@ -1,1734 +0,0 @@
-index,data
-1,"Minute contain minute fact. Collection someone always.
-American sit example pay support how. Stage customer while source own behind. 79x91"
-2,"Take way artist director prevent unit sure. Yard worry score operation cost industry.
-The budget sit allow hit especially. Always experience door this."
-3,"For avoid lawyer reach manage early sport rate. In station top ask.
-Break trouble seek. Nor partner outside defense drop family."
-4,"Any talk hard thought choose live but.
-Imagine any get budget. Easy tree word take us than hope. Serve same explain south.
-Not hundred large rest task turn. Activity force around apply."
-5,Front down vote poor pattern.
Who above worker plan five total less. Occur open about late remain. -6,"Her who bar college. Yet million property laugh popular major maintain. Number plan security song. -Ok operation behavior option new. Alone religious edge." -7,"Improve learn its personal which. Own rich truth table unit weight fight structure. -Improve what production soon. Loss enjoy since situation fall talk." -8,"Cultural and offer father author. Pm others leg. -Rather catch marriage page. Community behind green nature sea reduce. Simply five him write describe fly. In treatment little baby clearly." -9,"Item even prepare behavior business. Population back southern bit. -Type always skill order choice. -Fill movement star air market whose. Law feeling artist these." -10,"Dinner interview continue arm. Thank usually near well center. War candidate begin drop it discover when relationship. -Show individual whether safe realize. Service around PM car small." -11,"Under him morning game song sister up hear. Space should ready national recognize listen. Song perform summer onto toward avoid. -Garden every fish be happy defense. Itself respond less method. 17x75" -12,Material production half set feel. Wish camera age say set. Serve middle wait field hope. -13,Interview less capital color. Wife public no nothing enter. Girl chance carry probably. -14,"Suffer learn move great talk size produce. Indeed better water organization know sure. Kid country like field. -Outside rise fire clearly teacher none try. Rock very right level past." -15,"Issue son on Mr despite begin. Choice doctor play police. Little score reality much administration expect focus. -Entire top finish discover. You movement attorney culture country billion." -16,"Try main make this knowledge share at. Clearly mind little here. -Late policy threat onto. -Well race voice both suggest usually yes unit. Measure economy area last. 78X69" -17,Grow enough require process field should agency. Attack just ask feel. Service but sister seven reveal sea lay happen. Suggest those bit trial. -18,Science similar bank yeah quality forward kind west. Buy fire sea home issue. Detail force support friend hundred. Draw window nation push hospital. -19,"Serve himself service all high phone. Range yard good move now. Hit decade operation. -Type trip local white huge. Ahead in policy relate actually manager protect." -20,"Truth table mind sound. Fly president part call. -Particular student add enough always cell. Blue speak only risk I reveal majority idea." -21,"Girl decision both big. Car until in. Along share school. -Model cost some picture land manage. Many happen study reason guess people amount. 83x68" -22,Effect political church usually. Tough close floor card approach enough life. Unit black lawyer human last author billion. -23,Song against even begin onto mention. Customer view country nice. Build bank exactly air open say because. -24,Understand skin response according soldier. Reach onto contain per special carry course. Bill effort other artist church. -25,"Consumer member population. Option also professional approach. -History hand knowledge establish continue visit. Old final only four south all like. -Learn hear build collection." -26,"Member near common place half cup whether air. Daughter writer Congress agree themselves. Mouth instead population apply office while. -Cold happen such reason. Sing movement hit word political." -27,Staff article kitchen account reality. Medical much travel room believe several majority. 
-28,"Relationship government book five identify either exactly. Sort entire out clearly often. Others along newspaper. -Hotel job activity. By few speech whose whether like." -29,Nothing research already practice himself air TV. Eat know anyone property like speak but. Industry always cup we over fire discussion. -30,"No case usually Republican. Administration owner claim institution activity something together. -Nothing ready save condition either police. Magazine future plan. Learn our such." -31,"Give air more. Law single just policy myself test. -Prove body five feel make. Serious year college. -Determine as middle film central. Under hour wish place else. Name him that toward agent wall. 14x46" -32,"School plant five wrong western painting. Kitchen society body view add medical beyond. -Parent rise common stand music ten fine. Remember key bed kind pressure want. Dinner game must reach simply." -33,Management imagine service speak. Discover wrong inside environment test left good. Bed then small chair training. -34,Protect wish positive air power individual hundred. Hair could throughout price land method factor. -35,"All admit movie catch certain reason but girl. -Very fear case order firm beautiful. Old between in life end before suffer body. Continue drive cause sure." -36,"Training everything music I now across. Sell look determine artist by find brother. -Argue sell hard road. Tough clearly difficult of each. Child offer significant set." -37,"Whole whether about final question. Actually beat choose pull direction week maintain have. -Growth career trial concern wrong music. Century these under appear." -38,Number issue short. Agree eight strong measure age church word avoid. Beat performance thank sign. -39,"Chair first her girl federal traditional draw senior. Customer politics protect crime ground. Score shake woman. -Either part star who take agent." -40,"Computer avoid property trouble. No data evening weight sport field. Hot create near edge all source. -On stand order beyond. Least cause stop." -41,"Pick ahead manager little treat require health. Since impact matter. -Else visit interesting opportunity suffer could high. Organization ok himself. 35x40" -42,"Measure how citizen perform current marriage. -Card subject here. Few whatever parent to. Bit herself teach type party yard. -Arm scene accept sell. One relate collection radio you concern." -43,"Point race water gas three. -Include thing respond health plant stage degree camera. Set example site rather." -44,"Rise several commercial practice claim. -Them improve medical provide. Miss no popular rate city control push. -Couple check pull unit southern. Drop news hold third control traditional rich." -45,"Peace bar out financial. Top up simple who lawyer early outside. -Wait what hundred increase. Car letter see outside treat enter marriage. -Area public true draw range too loss." -46,Language interest four take old. Education if good level hand. Year body ever training. 78X25 -47,"Itself affect trouble respond community. Young institution relationship. Whom remain boy budget theory. -Consumer bad hot. Social debate analysis. -Laugh say easy different until. Large material whose." -48,"The sign figure. Today question group space floor soon month. Fill now first action. -Paper already window leader drop agree feel. Second eye however." -49,"Until book might order industry war me. Five lay small particular situation statement. -Decision whom about American view song edge Republican. 
Health test nature light recently window lot land." -50,"Half clear drop buy through turn. Skill follow election card she by. Check charge another space support grow remember. -Free let lot fear born. Even good agreement of free cup. Simple out light." -51,Others short professional ahead story address month. Order body less forget success suffer. 29x42 -52,"Pay across simply course conference free security. Respond star rise discover. -Already toward various. Arrive fear fund state agree. Everybody establish world old." -53,Particular item look toward know contain call. Prepare develop act. Support hour else deep. Against drop tell. -54,"The laugh interview senior oil mouth. Hard camera throw answer success nothing himself. -Question sit mother understand. Actually full future local agreement break." -55,"Letter necessary meet information. Certainly subject need best. -Of not soldier tree clearly. Training officer phone crime use. Letter action plant practice nor." -56,"Final improve loss risk leg require husband. -Follow class gun several main. Meet fall same brother president. Outside letter bag family because author each." -57,"Travel part show ability center. Yeah watch item light. Set else market mouth pass across. -Than some organization win break arrive. Ever middle represent thus list century far." -58,"Cell factor artist economic task between. Dinner fall share bed central rate million. -Provide always fight compare young." -59,"Group threat sense consider. Theory along great various question himself cell. -Other inside amount value. Compare tax finish food. -Language word consumer teach listen base. Fish image door medical." -60,Run mission of source kitchen against. Rest security always four foot. Better worry money shake hold lawyer. -61,Interest require gas wall. Different it see fish company. Meet law hundred more our decade low couple. 32x18 -62,"Past beyond real clear condition. High PM what central. -Behavior evening take site personal us require. Response attorney gas choose something scene with home. Case while little task rate physical." -63,Claim same environment scientist. Marriage tree quality attention hair sit. Son drive the finally. Medical order rate pressure in mouth. -64,"Similar five else performance. Real brother job build. Manage course realize really perform individual. -Behavior address growth skin us last all. Arrive table professor term cold word." -65,"Conference determine take couple site. Avoid government such remember great system clear. -Drive carry bad face trip. Face history rock garden so usually child. Door every page thing parent put." -66,"Leave woman sing security upon area medical. Project brother care project history build military. Participant region hair alone memory. -Whatever concern indeed partner window window economy yeah." -67,"Administration one west rate court citizen idea. Visit mouth Mr determine arm. -Common result five citizen bank. Kind pass spring arm. Continue center none poor." -68,"Third decade themselves environment appear goal. Story important social office Mrs. -Serve few little. First seek live. -Subject size tree crime describe reduce four." -69,"Detail receive tough would pressure institution then scene. Policy rich none already plan realize. -Attack then personal. Coach history activity knowledge. Different despite until price act." -70,Citizen wish among have prevent former strong. Recognize everyone them political support second husband network. Society general process wind pull example know. 
-71,"Book often recognize daughter chair. Help evidence themselves baby box newspaper American. -Cut board issue anyone wait oil some. Product provide especially ability. 67x1" -72,"Recognize wind visit allow. After speak seat cause report message. -I business our single have. Thought rise hundred among. Economy short writer local." -73,East real seven office western. Behind perform more radio money seem statement. Agency evidence ok trouble. -74,"Boy help ready discover. Center peace million pay sit. -Any face miss million east surface wide. Positive would movie official gas born. Discover performance similar teach along who." -75,"Feel sign reason white name property key. Growth mean at board. Arrive apply camera. -Data law leader professor single. Player bank authority ask sit sure." -76,Still trouble bit while attack. Deal across relate officer health care. Young white step deal word perform expect. Factor born around through young. 67X33 -77,Whether PM building blue bit. Specific executive choice thousand. Course Democrat explain thought collection hit. Mouth list cut wide natural country project. -78,"None enter film table cold impact film. Executive article TV kid member. -Black employee yes region ten prepare western. Fact political upon research." -79,Recent probably only where turn see girl. Behavior industry about some. Table agent political billion. -80,"Party laugh cover manage artist page rest. Own sea can religious organization. Size would compare play station. -Power major thing amount watch. Buy star customer." -81,"Long control idea couple room. End own talk attack spring against civil. -Society market that matter word include shake. 34x44" -82,"Letter improve trouble according former. Score fact think tough image also. Rate physical behavior if church. -Medical bank rock. Stage great style often far soon eat. Most Mr mind wonder." -83,"Trip truth might beautiful. Operation character law special step. -Inside main there practice difficult camera. Run road ask build sign. Piece on any song." -84,Right whether serious avoid consider Democrat least. Pay hospital friend instead strategy mother meet. Author eye force physical. -85,"Hundred kid this various perhaps sometimes condition. Us section begin everybody owner rise memory. Rate wait list six manage. -Paper opportunity tonight relationship response some like." -86,"Other south bill fact computer occur. Clearly continue create later decision. -Source network sport cost season us trip. Rich law computer age industry sense." -87,"Fly because rather military different already fish across. Argue but by claim decide. -Fly act country data just task. Including firm certainly poor book community." -88,"Campaign bill her structure public order. -Foot catch meeting letter. -Out guess tree deep. Keep house city key improve pass discuss." -89,"Adult itself indeed level. Of policy question drop include day. -Close follow doctor. Figure quite raise single ground education. What west democratic here Congress behind." -90,Most unit group challenge office think rock. Assume light strong hour. They guy claim end full read important analysis. -91,"Join commercial reduce firm. Forget meeting much less side old. -Lose right drop unit. -Executive bag dream system artist station plan. -Eye people perhaps during. Financial reason each light. 55x66" -92,Activity sea campaign. Able break national development. Court along along father once computer team in. -93,Cut source wind view investment social theory so. 
Off still much quality together top finish. -94,Spend indicate month from surface. Green agency free magazine subject. -95,"Kid national too wait clear. -Long sense war. Any represent wrong statement. Rule floor meet skill anything commercial. -Policy always worry gun." -96,"Glass write across traditional. Listen box make movement who. Voice need factor fund age city husband. -Second prove order plant. Pick start interview power none identify. Health stage big." -97,However specific believe condition. Name town organization listen around idea. Travel process certainly. -98,"Last tonight shake cut near teach list to. Tax discuss professor simple law. Person guess allow rate house. -Head knowledge church wish. Move call identify agent." -99,"Forward why final plant benefit work officer. Against gun senior painting cultural learn beat. -Mouth out score see throw develop. -Then toward way no." -100,Position once owner up field. Source present during. -101,"None write top bed itself. Father protect team two leg certain. -Around guess idea. Investment focus difference wrong small. Fear pull once listen heart window. 2x69" -102,"Use along blue rate. Could coach deep these. Development lawyer important suggest despite. -View newspaper drive tough. Than fight certain couple." -103,"Seem why career loss billion magazine education. Specific hard final college prove drive. Wind tough star two specific. -Week management tough eight certainly. Wall information after very main guy." -104,"Unit red friend send. Radio across through north organization ask. -Father across Mr beat article couple run. Without collection imagine take million rate beyond." -105,"Will ask forward. Around tax debate view first. -Yeah so house. Upon magazine do life stage what world. Save agree some commercial me available assume." -106,"Budget form write whole. Argue fast girl letter. -Consider today chance safe production collection as. Yard action policy safe exist probably improve. 34X11" -107,"Peace buy win performance. Decide word sister change. -Close identify detail. Surface issue inside beat. Environment cup church unit." -108,"Wife appear democratic ask. Provide about film score. -Carry figure friend act husband. Eat top American also forward. Black must behind continue consumer. -Truth long from bad let. Debate then ground." -109,"Either hotel pass detail front since care. Song must easy rise campaign. -Society century operation place fact son among. Pay rest for despite expert. Happen though star brother." -110,"It in beautiful. Interview industry direction road big. Wide wish response rest time government. -Culture political future. Piece eat air say seem. Event energy maintain conference figure." -111,"Tell view American ground. Candidate huge run. -Act again every pay house sport special Congress. Safe we health well over including either rise. Population rule approach opportunity. 25x16" -112,"Few race pressure present. Evidence team hand agree culture give high contain. -Stand so summer tree special task live. Worry page option somebody fire. Kid mouth place alone." -113,"Shoulder support enter leg. -Religious exactly especially while. Voice sell state big professor." -114,"Art field stage red. Stay maybe or direction or style bit parent. -End old certainly expert impact. Public support stay past who. Reveal operation despite present build spend." -115,Industry outside entire take course. Stage boy local Republican music gun. -116,"Out bank huge here cold. Grow ask analysis prevent. 
Dinner hard usually wish hit list then. -Few article skin claim network before. They six something wish consider sometimes indicate." -117,"Field amount quality. Financial local ok instead surface. Million wall onto position. -Remember candidate medical. Join movie eat see bill." -118,"Course return other major. Drop outside management who. -Great safe leave among yard read give economic. Surface economic reason particularly who thing by." -119,"Direction increase owner in decade writer. Read rise end themselves. -Right public religious growth thing. -Close meet available local full." -120,"Five capital real develop ever floor president. Wonder answer middle light. -Central approach car drop trouble. Sure professor weight. Town kind focus sister long success member." -121,Operation enough writer indicate fill officer raise ball. Later security set all eat rich during away. Space far individual itself offer career fly himself. Vote debate staff start. 86x60 -122,"Culture determine police common thus. -Up last present today see fight law. Chair born relate heart difference community." -123,Box dream kitchen skin rather. Onto career lawyer even fear front. Dream style bring none goal will serious. -124,"Eye final peace. Poor continue food education long. Painting back coach despite may until role. -Very pay raise color reason. One bag adult." -125,Create ever always too. Become record us truth majority. Call future hundred church. -126,"Body chance our anyone she degree. Low field exist social. -Defense life structure partner most stuff main. Interesting from television series." -127,Guy hot walk base billion various. Collection available such while impact into stand. Out easy fish they life these traditional. -128,"Month century development study ever. -Management name sing these gas. -Fund technology would individual night soon picture. Nation rate college measure administration Republican." -129,"From yard teacher end bad perhaps. Term issue sell American positive. -Bank air town occur the behavior make home. Meet mind might. Heart trial accept painting." -130,"Involve crime anything song happy oil both expect. Result exactly person trouble. -Practice important pass huge. Item statement pressure finally." -131,"School beautiful body themselves special simple gas. Entire thought test pay. Customer direction hard health. -Quite receive hear. Hard who foreign station bring. Head stop another task when home lot. 78x2" -132,"Structure step whom. Adult after special much. -Nor language above generation watch American after. Husband economic suggest old themselves city particularly. -Give deep try ball him." -133,"Door here production protect. Pm recent summer everything. Television shoulder firm particular try improve hard. Response environmental war authority instead maintain chance. -Never conference system." -134,Of news voice pattern lead necessary. Western many up some small executive. Couple structure first growth laugh. -135,"Car remember expert point. Culture chance single. -Hot dog serious time possible cause sound center. President bank front west develop body." -136,"As lawyer million line response. Mouth week world fill item entire nature. Matter vote safe senior. -Their speak wish prevent change sea evening mention. Environmental word past total ready. 34X72" -137,"You force check particular risk interview. Value a baby. Your how nothing. -Election action contain no. Opportunity head break attention hand." -138,"Behavior partner well other. Foreign might officer. 
Yes feeling fund factor. -Hot find run most trip contain. Hear themselves form to future town listen." -139,Tell skill would for leg. Forward middle program morning particularly hard wish. Seven material well opportunity night. -140,My doctor situation voice age side. Between certain beyond into opportunity order down interesting. -141,"Analysis actually painting reason. Size arrive spring. -Degree number knowledge know star trial. Everybody century follow form wonder itself. 20x77" -142,"Civil such shake. Five task back three item itself treatment. Court than call concern four. -Win kid draw need. Very respond actually difference. Wear hotel leg outside series." -143,"Any force staff church. Support including decide organization product strategy hot simply. -Staff tell size example. Hot space parent away teach open. Reason medical on open degree score." -144,Opportunity almost apply executive blue about. Rise star ready represent American rock. -145,Teacher address beyond blood. Party number clearly catch star side six. -146,"Son imagine soon growth. Trouble through us heavy wait six. -Front feeling fire. Back learn race with indicate. Discover including job resource generation." -147,"Adult trip bed say. Conference black current. Different money road scene. -Value pull friend trial such choice evidence. You around listen everybody suggest increase. One expect onto." -148,Drop reality agency however. Particular whose own environmental man talk gun. -149,"Rate return institution over avoid property protect. Home religious camera kind. -Science father take real charge. Situation number above time reveal. Sure direction goal difference may." -150,Admit Congress candidate policy walk value success. Investment figure ok population reveal. Without different thing husband painting economic thousand. -151,Concern catch gas. Everyone ask student. Chance surface around ask at. 26x53 -152,"Young my say third. Field minute image race career. -Student season hundred maintain. Various heart story take event fund recently." -153,"Must best environmental himself vote media gas. Along win put suddenly husband operation. -Green address listen continue senior bit. Entire believe size husband begin film difference." -154,Attack I while film. Author network soon last nature oil positive information. Throw since find purpose window dream education. -155,Sing business nothing explain social something order. Ten process with set also his decide. That name bed read all life. -156,"Large practice town pattern tax. Check four true back. -Institution red still. Star entire avoid officer. Catch play activity suddenly door. Discussion shoulder data prevent history sometimes police." -157,"Sound else upon agent beyond keep. Make then newspaper forget. Into few go movement during business southern. -Song six discussion single city. Material heavy measure professional turn." -158,Lead free people once public school. Fire board few return year southern. Behavior road central international either positive opportunity. -159,"Likely enjoy act lot. Democratic hold ready follow continue whatever bit employee. -Ready property able science mouth. On family executive understand perform necessary son." -160,"Enough whom discuss there television someone. Scene west huge heavy. Baby explain way career speak mother level energy. -Product up arrive go appear city. Series always son field weight loss." -161,"Impact court song. Campaign theory career during behavior. -Future also against black tree himself. 
Another past for argue produce maybe. 94x65" -162,Campaign purpose body dinner on authority own. Arm necessary peace good hospital with. -163,"Blood security decision TV central arm money. Beat use data range bag pick offer. -A their sense three. Nothing response natural business scientist southern base." -164,"Name recent heavy specific clear. Rest wrong want including ball career important. -Start end guy provide everybody. Issue inside charge prepare role." -165,"When approach behavior each court. After coach hour whether reach. -Democratic nation author ahead term. Hot politics quite same." -166,"Main hotel production nothing. -Coach voice five party ball building. Hold southern with about. Name despite party PM image PM although. -Understand could up machine. Whose public huge. 86X0" -167,"Service down sort another control. Far for Congress despite of pass edge. -Article up have ago look. Different never share require entire. Lose positive type marriage bank." -168,"Place dog current. Political loss left begin rock week. -Ask culture whether operation somebody every. National center tonight street we think year win. Keep usually evening practice final difference." -169,"High point strategy specific outside. Three down story. -Beautiful technology material suggest during real. Certainly stage feeling school whose would hand. Unit another fine clear mother debate." -170,"Federal use add cause city. Feeling short song much current question. How response TV which blue make eight. -Win however gas. Authority task newspaper also." -171,"Present if party dog our leave. Share against expect billion standard. -Memory close weight market. Southern purpose race lot hold work. 7x91" -172,"Behavior feeling stay citizen why peace. Challenge there near show career treat. -Family black yet seem. Model ask power court or save." -173,Hour benefit act artist travel us president. Image evidence compare all certain this about. -174,More whom me bar per. Imagine but firm answer same. Now old soon food work street. -175,"Free major improve maintain commercial let wish. Former defense series alone work again ground. Most official do. -Country simply crime mean fall dog out. Moment star walk week peace student couple." -176,"Director particularly paper information material. Pay after travel sense foot town rock. Six natural note front this buy as. -Your read start everything task front rather. Red north every him." -177,"Computer reduce scientist test interesting then personal hard. Model him difficult weight purpose. Memory dog next market. -His ever player less." -178,Real reflect cost official agency dream. Free job officer measure part a. Alone likely ground impact process budget. -179,Free later since three sell level week see. Source toward return customer. Tv home evidence yard song especially possible. Spend should light. -180,"Music animal city hard doctor woman. Thousand partner family send. Issue class already. -Since end bag future challenge series certain learn. Article bed firm allow stop. Drive herself former." -181,Throw ahead half prepare main special suffer. Thought administration total research believe practice place can. Represent artist of ask four south east. Floor check huge home late. 99x58 -182,"Begin role notice will anything me fast. Increase bit strong term bring number. -Stock admit practice no prepare story very activity. Later again theory large. Economic course section age pay likely." -183,"Road member another administration world. Stuff wish tough they often. 
Board hear develop next newspaper carry. -Entire site them your agent. Development from defense federal." -184,"Day evening gun hand. -Wear bit concern top. Under view weight seem present. Dinner nearly deal win." -185,Especially television later yes almost. Successful set face lead establish anyone. Perhaps nation same clearly practice information want the. -186,"Fire career participant seven. Fill well drug. -Second air few dark magazine detail sense. Minute year water news lot range. -Cultural tell my political." -187,"Enjoy list catch best just. Begin oil again any sing. Travel small stage staff. -Seek per future person themselves." -188,Better religious including soldier early may out. Single hit throughout mother success someone tough. It enter activity business house offer decade green. -189,Police success ready on big. Seek specific time family everything type sell. Industry team school. -190,"Decide begin human look major side two. Rich expert music policy four yes. -Itself traditional also plant positive miss. Particularly begin eat after along." -191,"From economic discuss now. Top economy poor then. -Theory question yourself across two piece character reveal. Soldier go series herself two while tend. Question air answer foreign. 21x39" -192,"Sell short today involve. Ready well fight within worry. -Plant cut letter sense those many. -Why candidate fire kind minute. Perform way along control. Offer before reason." -193,"Either herself adult support church. Think friend marriage because six decide. Mr as possible baby also compare peace of. -Need how fire already adult person card. Cell as father boy pay." -194,"Together grow music operation color group professional. Arrive big tonight machine human. -Many drop cut crime issue oil left reduce. Meeting matter green measure north necessary off quickly." -195,"Important mind maybe magazine phone about good. It which almost occur hospital always. Explain hot number section. -Figure edge point significant animal. Strategy try night pass." -196,The writer parent. Life social house west ten western. Listen commercial since once ask feel drive. Discover family political such bring message. 86X94 -197,"Future modern natural such. Get tree your they whose high able. Travel whether compare population only interest wrong son. -Child place street expert. Road computer serious somebody." -198,"President early Congress evening. Address move speech tend company action. -Many class other radio. Reach item draw wrong she size. -Politics ground option degree. Successful rate he." -199,"Indicate enter play worry. Thought floor your up must. -Environmental training lot day. -Pull space door media." -200,Process degree once price lot possible however. It nearly attack including artist land piece TV. Wife free example. -201,"Ball walk look scientist. We democratic assume. Experience itself student study instead behavior have. -Manage sell note action response member strong. Treatment guess focus south be day suggest. 62x22" -202,"Painting key point light compare. -Represent program keep want. Operation watch chair recent last these that. -Hair tell star because pay." -203,"Style power head. Their do difference just whom science stage specific. Try military forget lay analysis region. -Represent to sport relationship enough lot. Simple ball then career." -204,Represent possible east guy financial ok. Trouble real easy short return year. Ever not then military brother film able. -205,"Model left hour animal doctor center. Relationship medical beat buy. 
[... hunk truncated: several hundred auto-generated CSV fixture rows (IDs 205-683) removed here, each pairing an integer ID with random filler sentences; scattered rows embed dimension tokens such as "85x53" or "59X30" ...]
Single matter point contain. -Guess friend as language. -Investment impact idea main. Hotel edge civil part." -684,Position matter similar picture left wide my country. Central fish little series drop practice similar. Some carry least argue. -685,"Because those Mr at. Image middle can throw woman picture. -Blue today describe stock very economy bad they. Similar various old wall visit skill." -686,"Herself brother growth itself note. Science now front fish. -Important pretty research eat very Mrs. Become fall sound conference. Read politics mother claim card plant official say." -687,"Me officer provide understand none record. Finish loss half much because base finally. -Current baby sell soldier. Shake like well say." -688,Debate protect game community create. Process free off better change agree. Investment per full whole them still certain. -689,"Grow truth school yard. Unit check mind your behavior. Serve sometimes trial. Around front as level serve. -Experience pick perform up keep ten model. End speak continue data believe." -690,"Occur stop ready claim. Party dark charge power so hospital peace table. -Cost join return. Cause particular affect clearly. Cause raise phone finally particularly state account." -691,"Certainly future north meeting. Apply key high wish. -Believe simple here art what administration talk. Me chance over effort reveal population probably. 62x82" -692,"Western window process everyone later. -Site view hot interview question mission. Reveal produce by production include whom before. Can buy message lot number say customer." -693,Eye some poor impact none. Source result could thousand top which now. Majority total trade difference foreign magazine Mrs who. -694,Official guess mission form million high. Thought general military staff. -695,"Fly fear consider able. Live true expect everybody at throw. -Money know can away network. Two hospital notice again body record. Somebody century I will sister hair." -696,"When single give cold ok however dog. History near write but. Represent always according Mr. -Sing art cultural customer yourself. Population order table drop." -697,Off total stock candidate commercial know. Lawyer force north reason keep. Executive natural PM defense she feeling sell. -698,"A leg total shake. Fund mother them story hand house to. -Base market perhaps recently happen talk. Institution article remember. Between of wish. -Nation issue hand while. Score test try instead fact." -699,"Maintain onto amount me stock suggest case. Play deal effect born community pass certainly. -Teacher high forget political." -700,Huge some particular the reason tell spend wonder. Cup tough government should front impact. Country author fall role three weight oil. -701,"Leader interest thank. Your really amount either hand. Nature significant small evidence read matter offer. -Level arm recent affect health tree perhaps. Exactly hospital modern new cut personal. 80x22" -702,"Decade instead specific recently southern close. Accept ability future various remain return rock. Network great bed week. -Newspaper factor both affect dark option before. Response specific forget." -703,"Difficult leader top. Prevent father beat year big. Beyond without picture us seek. -Girl effort owner suggest boy finally crime. Event present make interview increase grow." -704,Two respond first possible represent catch. My painting community get can. Everybody policy maybe. Nice manage option step agreement remember. -705,Three certain speech yourself letter at. Poor try wrong well. 
His outside that room mission shoulder. -706,"Soldier Mr game level wish. Sell hotel film suggest spend. Woman few high. -Stock letter could lose. Cultural same medical two. Term term would standard measure clearly west. 49X56" -707,Effect machine page interview democratic clear Democrat. Language development social evening behavior. Line keep guess offer catch art perform. -708,"Democrat still individual bring have property. Step pick some painting. Property father nearly himself. -Form anyone manager enter shake final. Cultural edge thank where ability near." -709,"Movement wear least like draw get family low. Treatment decade discussion less billion fine continue. -Machine citizen receive." -710,Grow oil former area performance these a. World draw statement. Them citizen light view. -711,"Smile customer wish employee. Offer argue report year maybe wall. Marriage system fact mind fast college use trouble. -Congress still station matter. Necessary everybody key modern allow must. 27x2" -712,"Grow it decide generation see so baby drive. Approach until base. Across heart defense indeed. -Bit foot magazine among character listen. Indicate born many institution authority rock memory light." -713,"Window question determine medical hold other lose. -Opportunity situation knowledge relate line American your. Machine difference item reason. Community great expect song man consumer end." -714,Resource party one positive itself west which. Big candidate ten Mr leave born. -715,"Benefit grow any here. Large his page on like table necessary. -Identify five game politics hard. Although always ok despite quality. -Close perform work chance fine discover strategy day. It help Mr." -716,Specific specific national program. Join no politics call item benefit. Send relate open people. -717,Idea get experience approach develop important. Small make soldier office ball true. Weight detail good person leader check inside. Assume up between. -718,Yeah doctor people customer. Entire style enter minute get. Present eight too environmental future ahead say. Role administration something Mr player. -719,"Candidate general floor police. Chair argue between try growth. -Start attention play movement property. Miss college expect tree yeah." -720,"Rock although ready affect nor discussion necessary. Wear ago herself defense hit. -Region hold something message arrive speech. Develop each party pull group human." -721,"Hotel help throw while simple. Rest move wrong development edge goal. -Course push agreement purpose let imagine. Be campaign investment doctor wind use. 67x18" -722,Our the military development movement nothing. Tax teach deep. Speech meeting former town sit. -723,"Boy thing start even pretty ask. -Represent change score force would trade. Common center throw end. Million development western lot." -724,"Tonight local consider of speak cover begin. -Somebody wife policy service relate news. Stand particularly pick enter occur number." -725,"Executive manage about under shoulder fire. Color case development issue. Relationship or too inside his. -Account spend quickly. Management personal college difficult main newspaper." -726,"Order fast subject not. Sport somebody subject now body. -Senior air space card eat. Sister prove down month each." -727,"Want store today here. How however read factor. -Gas assume yourself bit answer truth. Realize factor north job technology school soon." -728,"Head standard management around. Rock evening let follow. -Position technology light but might. 
Claim least heart hospital next state memory anyone." -729,"Them always arrive so with already thing. Film thought few movement. -Interest million act usually important whether leave." -730,"South market thank right agreement. Lose have dog card lead million mention this. -Between soon happen alone parent article present." -731,Decision public partner election professional school analysis. Far spend population exist subject future number. Buy few window boy. 28x5 -732,"Item fall and quality only. Kind up agent box end. -News indicate hear population. With step newspaper. Forget require space tough draw impact table young." -733,Ever prepare seem Democrat reason item whether. Hot attack room attack teacher of main. -734,"Mother ever eat ability. Summer truth size write kitchen bed structure actually. -Recent energy argue may. Church during create real single become yard former. Reason walk meet response left notice." -735,Skin worry card brother fact medical threat. Yard participant time understand few provide success. -736,"West join American arrive air oil. Side entire ready. Card any thank debate force among. Pick here purpose his kid. -Discussion stuff enter. Report again join carry food eight. 48X69" -737,"Information development next well. Prevent fly prepare language. -President reason bar guess room. -Run available choice strategy wrong. At accept history seem environment whatever majority." -738,"Wish huge magazine your experience. Half visit life learn. -Science wife yard resource when hospital. Position history north record structure. Top painting up however get whole." -739,"Scientist food go clear today. Recently hold face themselves week air example. -Usually shoulder cover economic participant. Pull their dream money professor right." -740,"Prove argue meeting different total. Reach return this media imagine store affect. -Exactly address move subject phone a. Candidate air five admit most stock." -741,Interest trouble issue baby later past. Concern from all low. Customer ago human decide who wife. Total recent hard population. 67x7 -742,"Take alone because cover. Total often fast east themselves public. Third cost eat material however. -Point ago case life. Then among box turn clearly scientist however. Training day national guy." -743,"Rule certain five air less. Nature power ready key. -Southern wonder training interesting especially true possible need." -744,"Trial fact student project. But by professional test. -Respond black science. Soon up image rule meeting laugh authority maybe." -745,Final voice reach movie picture all others. Consumer whom choice price break recently best. -746,"Generation deep protect base various machine go. Card face nor never perform enter. -Current discuss later address that trouble." -747,"Human heavy PM low range soldier pull. Authority party month speak agreement hit on. -Them must heart stock story. Information now cup they sometimes guess. Identify cup magazine road girl." -748,Republican eye sister realize new exist soldier control. Economy recognize research number cause. -749,Event break occur my compare. Consider section truth long meeting feeling nation. Want major do improve sport to fine early. -750,"Not guy education. Environment news happen chance carry trouble become. Need record east seven edge. Somebody quality wind radio property. -Bit from bed energy care myself difficult work." -751,"Language budget material. -Safe media near expert identify. -Window back floor avoid. Should around any major. 
However always life source suddenly. 26x90" -752,"Happen surface since blood. Stop good ready collection them thus. -Total finally region marriage say accept common. -Next network the hotel others just. Or set site author before notice big." -753,"Side inside model art. Here attorney third college. -Away serious stuff wait enjoy. Guy professor himself ground miss." -754,"However eye finish face really north. Edge behind head shoulder. -Energy deal environmental responsibility might. Travel who from fast month least talk. -Television every note." -755,"Management in site president point image certainly. Vote hit only sound item loss. -Everything right who. Office model newspaper watch Republican long." -756,"Instead agency as article recognize. Center minute fill evening skin with final. Camera find suffer force teacher. -Paper admit officer create whether. All contain cell style whatever hospital." -757,Accept protect money happen. So push deal increase per team. Produce face deep simple lead. -758,"Participant lawyer small live notice station organization player. Defense stay effort at sister. -Teacher ten data among. Power police federal series evening mouth director. Measure Republican true." -759,Thing clear natural relate until exist series. Almost stop research likely. Analysis enjoy range reach over practice again. -760,"School understand local decide check. Left relationship kind list. -Possible head social born husband source book. Its husband protect attention." -761,Tonight girl enough big team. Picture along also total born miss floor. Parent traditional interesting visit election. 4x13 -762,"Quickly might though you type detail face condition. Mouth agree firm its value. -Win cultural PM yes. Take spend ground case they. Gas visit chair." -763,"Three onto can by true some however. Management think property step wrong voice. Nearly stay effect allow. -Attorney likely there production." -764,Man point moment term kind. Both question field key lawyer. Job score economic environmental entire. -765,Ball teacher itself assume husband after necessary tree. Vote surface finish despite at pick. Film may paper blood. Be professional decide say figure pass might get. -766,Successful road nothing clear. Control learn member media American talk unit. Floor wind letter wonder ten small lead. 96X25 -767,"Leader budget those. North commercial year firm. -School fear water know wife would. Keep edge attention read professional those travel. Approach matter hour notice involve itself." -768,"Itself friend or later price next leg kid. Forget office at even include. Market difficult involve strategy remember talk. -Reduce professor campaign yard million prepare item." -769,"Long PM fear fly those. -Well end water culture cultural too. Range allow form. Identify which either great somebody." -770,"Few floor indeed five like. Bank herself firm team. -Great performance cell drop improve down." -771,Public same fund look especially head young your. Change property board consider. Sense weight perhaps street develop. 32x66 -772,Quality attorney party it with system anything. Tell age seek right. Part just worry once level pass. -773,"According east peace of. There hospital treat risk price. -Heavy attorney visit company." -774,"Would another development value camera. Ever today work audience. -Different former area program. Within event task sea rock. -Before small however positive federal them. Study color central." -775,Wear middle send church family. Expect professor under before. 
-776,"Face decade everyone same. But most sound probably news power. -Rate how factor present training especially summer serious. West thought measure. Recent more of meet follow." -777,"Night technology get writer area whether trip. Floor professional might reduce game game even thing. -Skill care note can last life occur. All remember short evening language property night." -778,"Stuff capital we rate. Two year shake between discover. -Hold no reach both partner matter. Two sea represent create score Mrs he. Consumer chair appear course exist." -779,"Next party thus south consider able. Agree worker none her nor. Begin bad any fast red. -Method magazine fine door way do kitchen. Food message company thing. Because style good break." -780,"Service fly but man claim leave draw. Why among soon cost rise tell style. -Civil art movement size view create. Agency shake special left business pick big. Point indicate page act agree." -781,"But institution it him watch next analysis huge. Writer meet near art develop. -Trade power detail minute very. Brother into to happy. Member interview away write soon. 93x17" -782,"Side today end skin. Good toward happy lay air. -Body morning evidence run because organization. Crime environment practice much explain national voice." -783,"Thus international spend see summer. Family job from player. -Must item live. Off medical last stuff put. -Far decade oil surface. Much space example." -784,"Fine future spend nor factor. Speech thank pick operation Democrat. -Sound situation offer however. Level man toward section least. Follow him cut quickly recent." -785,"Truth law article. Realize resource behind party. -Kind forget language then meeting rest course. Defense class almost gun. Pay take hard his hand." -786,"Group career that certain blood. Financial country add fine suddenly. -Laugh college home create. Safe sometimes management. Picture market follow anyone." -787,"Behind natural while just budget put national lawyer. Alone beyond bed Republican usually better. -Mission plant low each. -My evidence either age. Fact put evidence collection guess." -788,"Scientist over course few history none government view. Star religious special visit structure outside human. -Yeah hear serve full side number. Money data generation special. Beautiful staff baby." -789,Color act bar return operation. Prepare room Congress doctor lot. -790,"Big third that try. Without area picture thing measure financial. -Instead station see compare." -791,Country language finally reveal across actually guess. Police foot recent lay choose effort interest. Quickly then music look bank car staff. 19x97 -792,"Attack end woman arm bed. Police charge institution site effect. Later where south. Minute break usually thing heavy crime. -High candidate clear. Benefit argue fund its fund main rich." -793,"Yard hold right play space town help. -Adult let type cell central wide admit. Training which easy four. -Benefit few out dinner. Car mention feel since yard small. Practice south explain." -794,Already debate on later hit system attention. Huge book generation reduce his loss. -795,"Step evening him. Nation air week project beat try east seek. -Pressure than his field president alone. Difference free me middle. For example lay." -796,Really business glass prevent military ahead. Wait study else consider. Edge kind street. 67X91 -797,"Air partner dinner star civil. Garden less friend everyone best professor prepare when. More role free. -Tax school with top." -798,"Tend PM effort close cell. 
Suggest detail yet table environmental clear floor. -Necessary discussion direction structure. Old identify blood school." -799,"Mention growth red. Already air interesting head skill. -Nearly government recognize determine wonder impact. Foreign new according they black bad myself." -800,"Spring theory market effect summer. Notice through address model. Manage adult sit. -Enough sometimes question. Capital right social hair direction everybody. Item indeed design moment or." -801,"Field large question I name. Never idea their most evidence. -Take must middle. Later sing girl commercial job either. Soldier politics program late player media attorney strategy. 38x85" -802,"Act service study. Fear deal behind record. -Performance see national left politics. -Drive win relationship wrong land. Avoid capital read population billion. -Now off wall. There fine out." -803,"Often question present again before. Huge hospital public. Adult western remain region. -Thus hospital environment industry. Sort opportunity act fish." -804,Half name term resource. Impact because same military appear factor. Of service theory general skin certain approach. Social today kind door above machine such. -805,View recent one your couple despite. Interest development reflect kid common fight happy democratic. Attack sing stock almost choice assume we. -806,Choose though eight represent nation skill field create. Success away strategy conference represent someone every. -807,"Party carry even lot. Home per occur fine. Throughout agent good. -Until sound respond old personal. No final themselves enter." -808,"Memory ten through hold field sign. Understand painting tend. -Party kind seven morning. Interesting lay expert different fire good home. -Key single north material. Purpose event plan trouble." -809,Tax space agreement check people church. Course my beat building exactly. Blue after miss difficult. -810,Work than general many citizen. Yes total and watch theory billion machine. Hit interesting by leave available onto. -811,"Early right nature technology. Conference mind hope. Above feel itself. Institution strong also chance other. -Glass sort whether kid turn anyone college. Night term enough decide. 50x9" -812,Door wind almost drive. Group among most difference. Respond morning group continue. Recent theory position boy last. -813,Democrat spend raise benefit sing. Lead blood beyond wife once edge they industry. Prevent four should want we. -814,Major very yet assume away decide individual. Plan identify individual final figure key moment site. Inside outside property deal tend. Speak training personal cultural majority quickly. -815,"Last organization single resource age sure good. Perform your budget line save rock. -Political story impact when. Along seek anyone think want what. Able tonight family cold memory summer." -816,"Product around risk institution speech image. Simple particular lose data. -Example significant particular manager. Response coach our suddenly phone." -817,"Business time catch enough after meeting production. Both natural store lawyer easy. -Cut fact couple free now mind. Character partner idea. Half bag eat interview later necessary officer cold." -818,"Lose stand way set Mrs lose act. Mr none coach husband. Blood task part year happen region service. -Can pattern past. -Surface thousand three. Industry best final. Born off mention card public pay." -819,"Go performance need. Student already product business indicate imagine. -Race clearly pressure world rise way clear. 
Song international although his letter game. Garden history international marriage." -820,"Relationship system chair above health order. Feeling service order. Work soon sense per although standard. -Ball soldier stand movie. Stock leave lot spend news." -821,"Reduce crime let simple particularly carry sell evidence. Trouble mouth third computer people bank thousand. -Hope trial seat voice. Above hold choose set. Body bag energy beat professional occur. 43x94" -822,"Father animal knowledge memory camera with pretty. Color relate sport able. Real establish finally. -Better party research. Idea anything president wind mission. Foot wait century way." -823,Country born design line hair entire. Design spend camera money white few organization. -824,Example protect building professor pressure. Customer strategy guess dream least candidate similar gas. -825,Close resource case above top between. Mean middle line probably ground prove. Feel method involve note collection friend author. Receive case stage right visit mind number. -826,"Open huge series second. Wear seven rich significant. -Let listen outside account nothing mother. Blue something five evidence same or. Hard what international test. 92X95" -827,Today night art create though him international. Thus skill bit suffer movement remain kind physical. Senior choice paper campaign cultural. -828,Debate himself list until painting term. Strategy book character budget bit choice maybe. Property art agreement treat indeed lead. -829,"Line expert around source bag. Eye stock old realize this too another. Clearly certain glass southern top side only. -Power recognize guy." -830,"Change number type can drug. Feeling nature road. -Your camera challenge position open. Know newspaper feel sit some after explain. Adult produce certainly president." -831,Into feel resource defense conference important budget. Cell their report citizen three particular. Ready society beyond tree. 51x22 -832,"Yes onto operation general while research marriage college. Adult candidate rule father edge dream daughter. -Civil anyone spend article exactly father take. Exist fish watch." -833,"Federal night visit. Newspaper across remain plant your. Common magazine item. -Friend upon recently true relate. Body high side. Couple upon choice." -834,"Generation age teacher more. The scientist result tough list. -Laugh enough this must. At church power child. Individual ability cold hot sit must strong." -835,Economic daughter market national world. Common despite south meet. Continue wall military. Happy finally card pretty buy run. -836,In other move cultural member have. Ground past that quickly foot. Mention end training every director each relationship. -837,"Also source not music bed late base. Number growth support less allow. Fall determine land that see task. Through than explain age green accept. -Investment understand accept huge." -838,Decide record another. Benefit marriage pressure every. Contain more people hold. Bag onto worry might political group not. -839,"Beat relationship school. -Big outside herself want party. Charge pick painting job bag part enter only." -840,Modern star race seven vote. Know green set three money. Dream her democratic painting here suddenly method law. -841,"Kind share another medical. -Science happy his financial mention. Effort report structure back happy. -Make some as respond Democrat listen human. Whatever walk agree small. 60x88" -842,"Mother heart food much catch again though eight. Born stop discuss happy thousand perhaps. 
-Just ground all project director. Live middle new tree. Let cell economy pick thing." -843,"Same sea month value worry out. -Election before ago leg particular democratic really." -844,Successful cost seem gas per discuss. Table side start especially send. -845,"Who response perhaps radio fire forget movement. Where one public. He bad play space soldier box. -Experience position contain worker right girl. Her yourself many box." -846,"Central why sure dinner agreement. Voice everything father consider. -Sport project technology free. Process but simply window. Growth beyond enter may hundred another human direction." -847,"Consider answer wait since. Network range conference level do. -Student likely recently section firm. Range common set itself issue brother behavior." -848,"Assume month require sense. Seem much seem beyond. -Teacher need whose bill. Teacher box economic special play wall." -849,"Within glass everyone us fact evidence concern. -Week attorney despite. Air network successful. Ever describe wife lead. Car event meeting occur food. -Possible eat stage assume beyond watch eight." -850,"Sign try product finally dinner fast while trouble. Piece face then over network word behavior. Such focus table someone training large already politics. -Should evidence idea. Expert pay them." -851,"Loss program create poor cost. Decide century have particularly response on success. -Option sell knowledge stage character or play. Seven democratic share board throughout our. 6x51" -852,"Front ready reveal evidence grow full. Respond water prepare bag middle commercial. Others return within out. -Reality decision commercial budget deal one door. Part admit news ever question animal." -853,"Consider risk summer argue. Within us major fast view fish. Agreement family wear college while. -Or type fish toward discussion. Behind toward provide green imagine figure." -854,"Red move both anyone would. Anyone participant study involve then let. Child officer east. Success week religious. -Just wide play fall side line cultural recently. Role responsibility student affect." -855,"Dinner sell own send building design. Statement stand seem manager. Manager office little stuff child card. -Anyone back fish world international very really. These market save risk." -856,Kind window member hot lawyer. Remain manager think care. Party how best explain. Bag Republican continue special ground party fund think. 13X38 -857,Two suffer world foreign cultural within institution. Experience cold break practice represent impact money. -858,Factor can talk. Boy senior election successful. Yeah may growth remember. -859,"Ability child against will. Safe research personal woman population reach. -Guess community home scene fire activity around. No nation those. Forward successful street would deal." -860,"Bill other history receive. Let music itself prepare indicate. -Color art tough value which middle. Smile direction play control officer much term. State market improve trip through." -861,"Sound dinner feeling author suggest. Maintain fast year thing. Two agent quality allow politics new hear. -Ago age standard small level. 13x98" -862,"Grow beautiful race sometimes leg forward. Step program budget treat step senior local. Authority give like sense exist. -Many trip style need may mind. Accept chance body far short believe against." -863,Stand who threat. Best personal line attention million. Young admit job group cut speak real. -864,Director including strong sure share speak. 
Want official sing policy probably some nearly measure. -865,"Talk others his heart start. Keep phone wear choose. Unit positive push. -Decade partner dinner pressure wind. Produce option daughter protect beyond several." -866,"Involve candidate network. Civil down third thought determine ask. -Avoid none maintain life reduce. Light almost wrong for paper subject project. Baby become long camera when this." -867,"Cost fine practice. Again word address. -Break force close fall fight time since. Good former already everything. -Identify law leave. Grow law situation face." -868,Of medical this point he bank speak common. East large describe. Task trouble grow next trade consider prepare. -869,Outside than write serious. Clear head different sign charge memory. Authority order she painting according prepare age bed. -870,Attorney find establish hotel fire she. Daughter put your culture record far model. Itself approach gas Congress. -871,Amount run through goal race. Least far million food series alone sit. 6x77 -872,Southern father together officer by defense. That thought themselves certain similar break could. Stuff total during. -873,"School case however few. Word ok let benefit use send. Artist industry raise she executive air likely new. -Prove father at set. Important Republican rock often popular important talk." -874,"Easy practice relate too low western task. Write enter responsibility effect. So moment couple speak. -Seek strategy send strong keep. Yet business appear draw away run." -875,However toward family report without probably the respond. Stock series lawyer treat tell fire. -876,System majority check. Lay his lawyer or small good buy line. Several scientist yourself two artist. -877,Skin score country future. Whatever soldier piece hundred inside side my. Friend court current care so of space. Study sometimes service direction project. -878,Write person material certainly keep. Teach learn trip population by good. -879,"Pay level act south word west. Job political eye whole skill. Nature bad action. -Measure attack its energy wide truth. Claim short claim." -880,"Candidate myself control seek newspaper apply. North country measure career PM. Politics important yet. Government kitchen should attack as bag. -Mouth edge carry trouble. Well itself activity hold." -881,"Class build because price relationship investment body. Mother argue yet speak by room small. -Bag research money evening happy trouble several. 13x36" -882,"City political bar might finally base music kitchen. Picture church firm military fine money or. Artist fire summer send up near. -Leg with artist beyond world college remain. Pass less law bank." -883,"Example far nearly. Threat direction not wonder fall instead begin. -Likely both evidence style capital. Away cost natural specific nor." -884,Walk plan whole draw. Bad accept throughout first project whatever. Season require move environment speak feeling free politics. -885,"Forward range safe president quickly answer result. Language study blue coach. -Skin friend about. -Near work word recognize himself ability also. -Side might hot exactly statement." -886,Indicate involve early response. Agent white offer upon black language. Mrs allow answer throughout employee my step. 98X51 -887,"Feeling pay consumer serve citizen. Manager imagine prove difference. -Town this either current interview our. Black bad stay purpose soon red week apply. Pm between after approach offer." -888,Her right beat security tough value. Threat learn similar. 
Others trouble respond range upon mind big. -889,Oil product between town order too board director. Prove evening act economy simple operation group news. -890,"Quickly him scene buy result she likely must. -Moment catch star white. Who you single tonight land space. Million line anything country allow add." -891,Study style put standard life generation. Admit show could government everybody the partner sister. Dog particularly with picture power collection. 29x19 -892,"Understand less city resource. Leader data number group threat chance company. -Improve so foreign. Suggest quickly you audience really. Program cause debate station power stuff." -893,"Easy career director TV. Yet see statement explain. -Fear real tend generation student thus when. Color top meet adult." -894,"Hope game Congress truth. Blood subject director. -Some sing wall time bank guy wind. Score run apply it size list for. Anything interest soon analysis night." -895,"Share describe these foreign. Fine religious day above exactly doctor. -Side room summer cost. Every your loss collection foreign. Without young Democrat specific." -896,"Task interesting than voice meeting. Stand agree wonder organization. -Health way generation future north so dinner. Her by piece resource plan result agent more." -897,Member what instead dinner study. Later maintain participant open. To either report itself out people give. Detail energy smile science check Democrat section event. -898,"Hit practice happy successful interesting. Again town sometimes explain. -Science dinner yes evening ago beautiful new police. Character between light star choice something manager." -899,Decision control gas town yet. Offer opportunity federal business again itself. Buy its government black. Now follow enter tonight hold case. -900,"Couple control body serious raise strong with. Back movement stand issue. -Specific table week reflect. -Hope what white allow mean stock. Trade sport tell green both." -901,"Memory thank area hear campaign trade attorney according. Social generation me end both. -Big perform anything. Anything nice wait call some. Property ability reality entire evening week power house. 76x23" -902,Each word way eye. Model pull image enough their glass. Tend special institution carry decade suddenly executive manager. -903,"Half third standard director nothing million many. Behavior human prepare prepare including laugh. -Vote high easy and remember six dinner. Evening travel foot total hundred maybe. All window under." -904,"Themselves point family. Any time daughter set. Performance allow manager drive. -Exactly after line conference send. Wear skill exactly face others find. Campaign learn force wife language." -905,"Sign risk word front service. Hospital report sit indicate. -Way could nearly health. Save spring product window plan put sell. -Nice task eight tell person their. She claim vote simply yard." -906,"Across move material how information town from material. Or after common church lose. -Get room billion phone reduce. Process community office total. Member water back guy whose same along." -907,"Put investment trouble really law land top whose. Week amount make fish. -Fall race sing religious include this conference. Front blood top know. Also cold color stock." -908,"Not sport development wife figure. Learn than contain instead hard position recently. -Bar full figure amount future fish. Continue carry commercial action company." -909,Reflect expert song risk ask fish. Sign audience fish better firm happen general. 
-910,"Program debate field guy deep. Information prevent who Mr. -Respond manage up here consider. Pay move loss whatever others kid. -Value theory I certain method." -911,"Wonder amount at light. Reveal meeting thousand remain discover. Attention process name shake. Attorney approach support politics total. -Must new scientist sing baby. 74x37" -912,Personal support exist less create. Realize authority different can natural artist. -913,"Remain there more quality early game. -Threat interview meet good. Pretty son anyone others remember. Nearly avoid reduce question official hot. -Together blood teacher party sense letter." -914,"North already resource lose step economy central. Down good listen management. -Reach sit find into. Growth garden seem together." -915,Base professor prove worker hope section physical. Huge just cover financial management policy. While professor science late stock memory take. -916,"East experience evening region international while position practice. Protect school carry baby issue because kid go. -Almost company article face pick community. 21X95" -917,"Soon better our six. Have again event control which read. -Recognize hospital development grow work too for trial. Message game we someone expert. Conference during must live many choice." -918,"Both remain learn want summer stay send. Get record your person. -Call foreign surface seat until. Cut reach suffer." -919,Develop management bag partner certainly read cost. Establish deal option seem how ok wear history. -920,"Join decade both voice fall. Claim experience condition like memory. Writer if after live mind usually. -Matter score few century audience. -See traditional long water." -921,"Tonight look beautiful look only authority day. Statement likely plan child lose. -Some truth control my customer. Bar guess sea key single phone. 62x9" -922,Set thousand suddenly relate cold also religious. Final teach art recent party else one we. Team event heart practice leader physical. -923,Ten down some Congress finish similar Mr. Large resource economy memory itself write. Artist thank tonight past back same myself agent. -924,Voice third with I number nearly. Field from detail itself successful sell while rest. -925,"Century break push collection model. Drop matter ten Mrs. Black professor energy music. -Score hour hour into staff weight book. Style series cover really. Matter whom yet interest part memory use." -926,"Fill garden break though step this cold. Show sea could power tonight key. -Reveal interest medical near because agreement. Blue government send especially clear record. History blood by exist yard." -927,Film manage prevent certain member act sister safe. Language property four value least consider. Series region any indicate again. -928,"Story trade within worker. Well bag similar window information. View case kid total total personal yard. -Change whom trip option technology." -929,Suddenly cover develop fact such remain. Real area fire safe. Nature magazine man morning. On purpose whom mention. -930,"War trip poor kid. Open indicate time. -Everything state stuff participant word full. Develop around million require. Believe action per. -Perform fine case myself. Wait learn language area." -931,"Mission people travel church interest beat. When realize begin. Great fact party guess company south PM. -Kind watch stock. Mouth coach while write partner. 91x6" -932,Who movie anything land voice. Real mention likely center knowledge data. Way move anyone window. -933,"Message within offer. 
Quality less bar attorney college every region. -When focus others left notice. Option wife nature huge west. Author window floor Democrat smile." -934,"Although image allow this exist raise social. Mrs goal themselves add. -Might minute beautiful evening. Community others opportunity wish." -935,Station most remember young hundred professional fish trip. Fill ten young whole current wrong. Owner officer vote report loss reflect statement. -936,Capital shoulder remember news all head learn nor. Occur region law attention word manager in. Go wait table early toward road. -937,"Still size affect standard later road if. Road close share challenge according. -Month special agree national talk country. Would look keep value green Republican." -938,"Not education recognize well record. Police ground religious class tree. -Themselves so idea address now stop one. Third laugh tree food. -Away area add social to market car." -939,Care effort amount walk. Way other your human region leader leave. Attention because particular property occur set. -940,Money bill technology better generation old including. Six dinner discussion support too despite claim. Statement plan happy dream pull work. Help effect window. -941,Attack baby church my center box sit. Describe space site important our model. Participant then offer. 93x1 -942,"Charge remember charge appear serve. -Smile like rule know style American. West move grow ever. Cup take last southern." -943,"Walk cup like history. Visit teach enjoy ask. Gas population wrong exist TV throughout. -Dark else practice scientist. Family ahead value though. Question east upon daughter ability health." -944,"Bed give call that best. Single foot year seven defense team beyond. -Color read whatever reflect case trouble. Morning mind board money late. Medical lot they particular before." -945,"Task action alone simply. Partner great benefit accept. -Pm improve for any resource thank hair. Political school key movie rest child lead. During trouble social just interest term parent." -946,"Economic particular speech between and show training. Nothing commercial sing democratic bag whose. -Similar five draw nor safe across. Mr class business land. 55X78" -947,"At million piece why tree condition. -Them different painting oil partner couple my. You good year drug fall lot." -948,"Serve her film purpose. -Cultural key others deal rise. -Difference pressure away ground fight." -949,"Offer score respond building. Mother reduce large laugh begin star. -Perform those can training board significant. Ok individual bar skill. Buy player might somebody. Image four continue." -950,Its nature right plant in voice your. Central become offer piece they staff. Court require morning friend agency thought. -951,"Laugh television voice charge fact. Money benefit goal tree cultural that huge. Send issue agent buy shake. -Result imagine program. Third see college fish manager. 46x50" -952,"Respond edge media long under fund student. Already sister unit lawyer. During administration lose skill TV. -Its soon his deep. Whole street onto hotel exactly late continue." -953,"Reveal return capital. Step season citizen. -Notice analysis eat camera marriage build family. Relate green ok thousand." -954,"Area face left quality summer herself finish. Everybody again say focus require. Late miss Mrs recently. -Million ball practice national fine Mrs wall. Public new think position audience." -955,"Art suggest well check voice usually. -Identify back clearly. Interesting group she what see miss." 
-956,"Behind color play water. Least build drug when with time board. -Feel after chance out light oil order. Both trade environmental use hold water. Majority test concern outside seem." -957,Show either product say toward. According last cultural put might middle. Water structure letter learn raise. -958,"Remember protect some worry practice. Finish clear try better seat. -Weight coach admit plant meeting. Forget ground event president." -959,"Business television entire. Agency collection total trip response. It decide piece reflect. -Teach behind street color stock. Hour wear onto reason them resource father sound." -960,"Amount hundred know field herself. Always later tell painting military green. -Camera bad nature really job nearly. Buy agency night stock. Threat very hair effort." -961,"If manager response land or four score. Director just better bed oil question future. Society task all deal above resource. -System lead yourself two likely morning. Street either realize care cost I. 83x14" -962,"Southern scene police art southern dream author foreign. Sport character possible environmental. Economy top ball positive room degree choose story. -Economic long indeed national. Him walk blood may." -963,"Few half policy produce view. Issue cover bit himself. Cost develop partner. -Positive fine we beyond figure. Group determine forget." -964,"Education then social space environmental. -Some budget wrong might prove care. Much director table yard nearly he drive. Also financial during lawyer forget morning site market." -965,Result above democratic minute fear author or. American whatever professor recent. Fish section green. Community only give include occur low big. -966,"Learn brother region serve set push. -Loss range any during. Soldier sit bank half test. Various itself much." -967,"Throughout summer wear within wear far action something. None long law agency skill item break example. -At ready popular career. Community degree past feeling." -968,"Hot fast middle store. Second city become because someone must professional policy. Those director game former position. -Few today allow factor down. Bar though exist half today." -969,"Ten standard their very. Pattern significant special voice care. -Bar she deep interest line describe. Walk sing themselves recognize serious business no." -970,"Treatment cell whether keep shoulder. Same but field father. -Time born step method number lose." -971,"Sister show difference north yes hospital buy. Himself in imagine. -Plan music section finally series. 48x94" -972,Eat agreement let become card save recent. Wrong suggest realize growth side hard. Today lot seat do certainly easy chair. -973,Though administration base claim and. Effort its can usually sound. Prepare international job. Nice opportunity build discussion leg score time. -974,National eye sense coach his during. Moment section contain bill west deep policy medical. Age visit almost table. Develop theory today writer under whose fly remember. -975,"Pattern several prepare pass attorney. Get dinner major partner shoulder. -Stay me value TV. -Important daughter customer story bring impact network. Research form physical discover enter." -976,"Movement market amount during who. Close each story still personal. Commercial response join. -If college could. Oil baby less example meeting all treat. 69X28" -977,Speak continue degree capital understand officer move. Policy cup would but. Imagine man financial include far. Financial forward itself cup management. 
-978,"No drug music involve other offer reach place. -Ahead member region business offer him. Relate close relationship financial. Contain kitchen page church after particular. Seek situation list sing." -979,Enough cultural respond natural once modern. Congress tonight suddenly daughter drive evening on. Natural table difficult live at else huge. -980,"Black fill travel serious. I order box coach bring share common be. -Personal make share her. Weight article into officer stock meet pay. Find government without century standard." -981,"At natural lawyer check. Effort significant war return address friend. Reduce easy hand start material look than. Really of short both happen. -Heavy friend social. Edge record message. 95x91" -982,"Control reach visit through house worry. Interest fish reason company image tough. -Seat buy debate some. About audience really strong eye red. Onto coach interview." -983,"Audience too group camera. -Image middle community rest between more one hand. Task science push movie positive avoid. Call take member task collection drop purpose." -984,"Glass send population investment. Over address begin boy by seek. -Protect direction cup personal civil window." -985,Executive few us beautiful morning food. Push seek soldier military site. Thus whether consider guy carry from. -986,Worry nice message day no factor. Cover maybe drop month. Physical mission what finish learn. -987,"Gas our support adult. Less career around on he though laugh. -Start sure east vote every reality. Wonder three treat receive grow total. Eye minute piece account speech image." -988,"Leader on example course. Hand have four account already party opportunity. -Must enter easy popular paper character. Around green they doctor bill performance." -989,"Major bank per least we investment. Late civil story. -Home detail soon or indicate. Key image skin husband argue performance avoid. -Want lay capital allow. Include whom body stand tend." -990,Behavior south policy since pass amount. End trial choice myself throw. -991,"Everybody avoid reason office how before light. -Difficult near young window woman court avoid. Prove car not car. Note might more stuff history all road. 80x10" -992,Station plan painting executive second improve. Measure side these subject. Fear language believe another generation Democrat. Couple despite others today. -993,If somebody significant out boy. Age traditional mind health laugh if. -994,"Crime small finally behind must from. The deep today standard. -Look individual group. Few environmental represent exactly tonight. Growth house international." -995,"Fight model senior large no. Environment behind group decide lose. -Fear keep town choose her model should. Here summer professor base. Law leave there." -996,"Season cut forget wonder local special reveal. Lead hospital individual theory inside take. -Analysis with energy pattern throughout today. Last can drug day letter. Paper among bad risk." -997,Condition lawyer staff success training. Approach improve individual while note weight decision. Time determine television under old blood. Admit pull PM relationship from direction. -998,"Other history analysis garden democratic. Court sign north and concern past money. -Shake particularly federal way. Decision sort fish attorney. Whole back oil suffer." -999,Buy health prevent program word trip. Effort not foot head network assume character. Discover hundred whom time. -1000,Step act its remember. Around style life choice grow. Top seat local just director class identify. 
diff --git a/data/raw/f_723_simon.py b/data/raw/f_723_simon_chien_edit.py similarity index 54% rename from data/raw/f_723_simon.py rename to data/raw/f_723_simon_chien_edit.py index 00673b57..8eb40609 100644 --- a/data/raw/f_723_simon.py +++ b/data/raw/f_723_simon_chien_edit.py @@ -1,130 +1,128 @@ -import pandas as pd - -import pandas as pd -import random - -def f_723(csv_file, column_name='data', pattern='\d+[xX]', sample_size=None, seed=42): - """ - Search for matches with a specified regex pattern in a given column of a CSV file and optionally return a random sample of these matches. - - The random sampling is implemented by generating a random list of integers which are used as indices. - The number of generated indices is given by sample_size. - - - Parameters: - csv_file (str): Path to the CSV file. - column_name (str, optional): The name of the column to search. Defaults to 'data'. - pattern (str, optional): The regex pattern to search for. Defaults to '\d+[xX]'. - sample_size (int, optional): Number of random samples to return from the matches. If None, all matches are returned. Defaults to None. - seed (int, optional): Seed for the random number generator for reproducibility. Defaults to 42. - - Returns: - DataFrame: A pandas DataFrame containing either all the rows with matches or a random sample of them. - - Requirements: - - pandas - - random: for generating the random list of indices - - Example: - >>> result = f_723('sample.csv', column_name='data', pattern='\d+[xX]', sample_size=10, seed=42) - >>> print(result) - index data - 210 211 Fund several agency oil. Evening plant thank t... - 45 46 Language interest four take old. Education if ... - 525 526 Action million cultural stand. Heart explain a... - 465 466 Security face clearly every could. Image beaut... - 430 431 Popular produce floor part soldier human. Youn... - 260 261 Customer game focus respond that central. Nigh... - 195 196 The writer parent. Life social house west ten ... - 165 166 Main hotel production nothing.\r\nCoach voice ... - 810 811 Early right nature technology. Conference mind... - 60 61 Interest require gas wall. Different it see fi... - - >>> result = f_723('sample.csv', column_name='data', sample_size=2) - >>> print(result) - index data - 125 126 Fund elephenat, the dinoasuar eat this language t... - 21 22 Such an important story banking at the house a da... 
- - - - """ - df = pd.read_csv(csv_file) - matches = df[df[column_name].str.contains(pattern, na=False)] - - if sample_size is not None: - random.seed(seed) # Set the seed for reproducibility - sample_size = min(sample_size, len(matches)) # Ensure sample size is not greater than the number of matches - sampled_indices = random.sample(range(len(matches)), sample_size) # Randomly select indices - matches = matches.iloc[sampled_indices] # Select rows corresponding to sampled indices - - return matches - - -import unittest -import pandas as pd -import os - -class TestCases(unittest.TestCase): - - data_path = os.path.join('f_723_data_simon', 'complex_test_data.csv') - - def test_case_1(self): - # Testing with default parameters - result = f_723(self.data_path) - expected = pd.read_csv(os.path.join('f_723_data_simon/test1.csv'), index_col=0) - pd.testing.assert_frame_equal(result, expected) - - def test_case_2(self): - # Testing with custom column name - with self.assertRaises(KeyError): - f_723(self.data_path, column_name='non_existent_column') - - def test_case_3(self): - # Testing with custom pattern - result = f_723(self.data_path, pattern='\d+X') - expected = pd.read_csv(os.path.join('f_723_data_simon/test3.csv'), index_col=0) - pd.testing.assert_frame_equal(result, expected) - - def test_case_4(self): - # Testing with pattern that has no matches - result = f_723(self.data_path, pattern='XYZ') - self.assertEqual(len(result), 0) - - def test_case_5(self): - # Testing with non-existent file - with self.assertRaises(FileNotFoundError): - f_723('non_existent_file.csv') - - def test_case_6(self): - # Testing with random sampling - sample_size = 10 - result = f_723(self.data_path, sample_size=sample_size) - self.assertEqual(len(result), sample_size) - - def test_case_7(self): - # Testing the reproducibility with seed - sample_size = 10 - result1 = f_723(self.data_path, sample_size=sample_size, seed=42) - result2 = f_723(self.data_path, sample_size=sample_size, seed=42) - pd.testing.assert_frame_equal(result1, result2) - - def test_case_8(self): - # Testing with a sample size larger than the dataset - result = f_723(self.data_path, sample_size=1000) - self.assertEqual(len(result), 133) # Should return all available matches - - def test_case_9(self): - # Testing with zero sample size - result = f_723(self.data_path, sample_size=0) - self.assertEqual(len(result), 0) - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - - -if __name__ == "__main__": - run_tests() \ No newline at end of file +import pandas as pd + +import pandas as pd +import random + + +def f_723(csv_file, column_name='data', pattern='\d+[xX]', sample_size=None, seed=42): + """ + Search for matches with a specified regex pattern in a given column of a CSV file and optionally return a random sample of these matches. + + The random sampling is implemented by generating a random list of integers which are used as indices. + The number of generated indices is given by sample_size. + + + Parameters: + csv_file (str): Path to the CSV file. + column_name (str, optional): The name of the column to search. Defaults to 'data'. + pattern (str, optional): The regex pattern to search for. Defaults to '\d+[xX]'. + sample_size (int, optional): Number of random samples to return from the matches. If None, all matches are returned. Defaults to None. + seed (int, optional): Seed for the random number generator for reproducibility. Defaults to 42. 
+ + Returns: + DataFrame: A pandas DataFrame containing either all the rows with matches or a random sample of them. + + Requirements: + - pandas + - random: for generating the random list of indices + + Example: + >>> result = f_723('sample.csv', column_name='data', pattern='\d+[xX]', sample_size=10, seed=42) + >>> print(result) + index data + 210 211 Fund several agency oil. Evening plant thank t... + 45 46 Language interest four take old. Education if ... + 525 526 Action million cultural stand. Heart explain a... + 465 466 Security face clearly every could. Image beaut... + 430 431 Popular produce floor part soldier human. Youn... + 260 261 Customer game focus respond that central. Nigh... + 195 196 The writer parent. Life social house west ten ... + 165 166 Main hotel production nothing.\r\nCoach voice ... + 810 811 Early right nature technology. Conference mind... + 60 61 Interest require gas wall. Different it see fi... + """ + df = pd.read_csv(csv_file) + matches = df[df[column_name].str.contains(pattern, na=False)] + + if sample_size is not None: + random.seed(seed) # Set the seed for reproducibility + sample_size = min(sample_size, len(matches)) # Ensure sample size is not greater than the number of matches + sampled_indices = random.sample(range(len(matches)), sample_size) # Randomly select indices + matches = matches.iloc[sampled_indices] # Select rows corresponding to sampled indices + + return matches + + +import unittest +import pandas as pd +import tempfile +import shutil +import os + + +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary directory to store the test CSV files + self.test_dir = tempfile.mkdtemp() + self.test_file = os.path.join(self.test_dir, "test_data.csv") + + # Create a sample DataFrame + data = { + "data": ["123x good", "no match here", "456X bad", "789x good", "ABC"], + "other_column": ["data1", "data2", "data3", "data4", "data5"] + } + self.df = pd.DataFrame(data) + self.df.to_csv(self.test_file, index=False) + + def tearDown(self): + # Remove temporary directory after the test + shutil.rmtree(self.test_dir) + + def test_default_parameters(self): + result = f_723(self.test_file) + expected_data = { + "data": ["123x good", "456X bad", "789x good"], + "other_column": ["data1", "data3", "data4"] + } + expected_df = pd.DataFrame(expected_data) + pd.testing.assert_frame_equal(result.reset_index(drop=True), expected_df) + + def test_custom_column(self): + with self.assertRaises(KeyError): + f_723(self.test_file, column_name="nonexistent_column") + + def test_custom_pattern(self): + result = f_723(self.test_file, pattern='\d+X') + expected_data = { + "data": ["456X bad"], + "other_column": ["data3"] + } + expected_df = pd.DataFrame(expected_data) + pd.testing.assert_frame_equal(result.reset_index(drop=True), expected_df) + + def test_sample_size(self): + result = f_723(self.test_file, sample_size=2, seed=42) + self.assertEqual(len(result), 2) + + def test_no_matches(self): + result = f_723(self.test_file, pattern="nope") + self.assertTrue(result.empty) + + def test_sample_size_larger_than_matches(self): + result = f_723(self.test_file, sample_size=10) + self.assertEqual(len(result), 3) # Only three matches exist + + def test_zero_sample_size(self): + result = f_723(self.test_file, sample_size=0) + self.assertTrue(result.empty) + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +if __name__ == "__main__": + run_tests() diff 
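Note on the sampling contract documented in the f_723 hunk above: identical seed and sample_size must select identical rows. A minimal sketch of that mechanism, independent of any repository fixture (the inline DataFrame is a hypothetical stand-in, not part of the test data):

import random

import pandas as pd

# Hypothetical stand-in for a CSV column; only rows with \d+[xX] tokens match.
df = pd.DataFrame({"data": ["123x good", "no match here", "456X bad", "789x good"]})
matches = df[df["data"].str.contains(r"\d+[xX]", na=False)]

# Seeding immediately before random.sample is what makes the result repeatable.
random.seed(42)
first = matches.iloc[random.sample(range(len(matches)), 2)]
random.seed(42)
second = matches.iloc[random.sample(range(len(matches)), 2)]
assert first.equals(second)  # same seed, same sample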
--git a/data/raw/f_724_data_simon/empty.db b/data/raw/f_724_data_simon/empty.db deleted file mode 100644 index e69de29b..00000000 diff --git a/data/raw/f_724_data_simon/sample.db b/data/raw/f_724_data_simon/sample.db deleted file mode 100644 index 651075e5203f3f91752c197ff880b266f72ed0d0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 20480 zcmeI3ON?CURmZE{_PD#c-8EsDI1@87pP5XO3CT_Dw#PG>z@+_l#`bu+dmf&Um|Im} zRbBV3x|RFrs&ZCM1VTaxSOXhS#0CjPVUYz9utLfL6e1Q83&cb0ct}JD2^ReR=eyPI zVZ#Oq2~{cMuE)LKd5SRk-KV1d8_fdv8!1QrM^ z5Lh4}1cpl|PoF)zbns#sw^LvGqI~e*-W|KLeQj&^n%mvFeEXU^y25?3>3AhwckAx% zwHw#A-M#Hwceb|gyEm@g|Fr#P7k9F>8jjrVwfAk z2rLj-Ah1ASfxrTR1p*5M76>d5SRk-KV1dB@0|buWWfRNK>1_VF#ve6WKWhC=>(5)? zZvDo_pKW~P)K}O4V*SOnZ?D~GeslF-R)1yX2P>CP{_y0hPvj^5{>1S3caFci{M*Y< z9UC5de(4vu`Ty>9aOJtM5HtCLn;40mgYG2J-v9ZWJR zChTkgGEwt#)s=zEK~@zpTa^L5^V4yPjTt3P1y76v47i7HG0Pp#9fB)Z+ht>RZ!pGv z?h@(=t6cvmt9*7hF808z%JGz)lTJdk-N(Ziz3F!N_IOs z{FKU=lVsG5+)dCZk9@G3#}iyN(`cQfmvoqIROVUA<=R&1vJt*}aQ+#Vlk5zDT&|-( z7$C-j_A{LO@UDOv7Q(K~=ekMJsfvQlRz>9Q`CJ}2cdxeZ1C!R}3ZL(5oH9=L1UxQZ z0PuYogjEztm*;~X8xUM0cWvLZ)j}Rvr76FsLpPB}uo?FH;3N3Wm96d&izcIex|lGXF@L!Le$)ir-XJke9TAV4wtZl zF;f+o)aQ}AEx#MbxsgpZI*O19NJj4R%;g!8;00AX>&~L;!=RE~_dazXLVq_Y25w?g z<7ZA(W5^P#)%Y-8yLbmCh}jK>ke7Gke9w*ZtXp;1*mW6zAmjP1Zk1bv;usLgm`gO| z!P?Vcb!InXKP7B8!T=q4tAbCfGcUY%$CcScMlThgk-M7oa7vgD9+R8YMw1tDOAdxm zjL+GKocoOk3-p~ZpU1Y6IXFcKmqO;?kX}I%9ynF zlu0YrLm%#aqFI*i9ixsAYsI z_{fwyp4d~!;F3lNfi^mU$1MjGGptVh1V)Jto_z{XU%#0ZQemzzEEP zY!cw@D*8pQSgEUj(xfDM${YidS3rDBZh@A|ESt}ON3;Hfp|QJo)s4jF&JV|FHWSzF z;}$Rk+a2PT>=inbbz;#*|HtsPog*7|y%y?SHX(X}pnWqL z7C!AAM(G@P5$H(1n6XKc*T#nc;_SPm4OSPI&|jcbf_O%#z&Jj}TX6%W(8oxVqY7Qy z0|_~}Tz+5~5Ngo%_Oso<2L)N4b@tp)#^npFUnK3skTjPI0RdrNdXfbd?~;13mH9y> zuByh$S`m4aNMStKE#!vzR7a8OX2$z${*cJ=F?Qg{7s@AMF7a7eb!VXFW4NN4xvR*G z4wn$cqx~e$Mq)j1ReClaCPD}p7q><4aiW2OFQmR#MsW+9-MA@QsUsm7n`1%Wsbz5P zEj{6GEGt8m?rTI4-W0j5o_s+0My}^mZmot2)mnn+JPF?10Gwp2rZbu3NH-aaD+$4$ z#Dch*^)tD`4gpo?fmHg)R78q9<4xxCa*LWHqUKG2ACO)ZljKMI4w{W0C3sDTOi$cv zT~ZjZzFEId3P64Ej|OGUTslmQI--N;PqFAL_vc3OQ$;YwRoY?(_#ASG3*MXmoBtI+ zJP=DNOjvyv)*bq863YXC9Uns!u#7gkW$sY58YF}i#uIV`ktfQxJX6a+w>M(1wN49U z`EVV}-QWJ8SGF@tp(I3xPm_LvRM9)jaK%8jtxyrT1Wee)iv-7Ru4P+_y2ZtvtdG|7 zS|5s5N-7$!F_*XWY?zOY^M@V>DN0*0kMx#sk(r1Xe=MnHqI6{ulH1{$pn3(rDNYiA zy(yESFAzwAGCKHF6a4Sr@T0k2g1SVhdr1j@njBEDGO}d@ZF`UmU6>7ey+UkH*nzY& zH-L9YyZdo&pp2Em{M2_)gb)yaqUOB}w@x~R1(^j~C$TATL}hp6oLLxOVvPhLYO0XP zTwP_cr{5$R(1B8XmIAQ2vg>Z9cvLR!JE#{SOv!-2tleY`W3yh=4bg34;UOu-p`hB9 z<#y#bCS?@3Brb}8L!O2ihC!_}1-TU9F0HVZ;zniEVcwziYFW0Iq0TG@T&#Re03(-1 zcy_HxK?@_r0#)?UX2v}-H9inEQQ3GdEWlTJJi=EkWp69e%#I&4dz>0;7ASyhN<0It zydaUz>QhyW-JbVj#8y$Ot>|F$BrD&;qY=70aW% z`jsLylqn7o-2drh|Asd}6t=xfSGwjNBz=OXmIP@#V&A_k#Kh7+-u(m6nvm zlgrDEPi#GY^@olvH(t8yCvJ1PiF$qXDD};y<;Ix{(+k)pSN;FFM%HLu-uTwWl~ccS z>h$^_uRpo=2hD$MK3e_$>N6|9ymInn^2Cpx*g5e_$G>;{%gf(g{`|4OKKAO;H z<)x{z(drEw3=NPruXEexH0yM#{M5DO#@Tbzc?xe{<)?F)A2Y683G?`Ps?_Kf{F=r;^00KW%UONuI8KVY%__ z<}_5OtFPI*RHrsS&*T@Ue)KS#t-Wg33{7>czGCwganB!YUQ(#tH3Q1=%?~=senRrO zoXe=$(f{%AJxfkAJq+28i(QKhf0u^1l~H49OpZ3=pZD?wJK{w8}oWIbOh?c zR>=p$q(`FXXuv}q1mn%{Q)>B_S@j$D6gbvqs7nx7g6fezibPharc0SosGDe_S%Ra- z33Z%p&(s0w83i+!XGEY~YmEgeF^L^3psegudV%4rT``yQ%^OM?rcTH~@lgh%Dva)5 zBv)R#L)z~!M_sGc(aAg#xD=;|2aPdr9^ib0zk5@Od#>n0TSfg2SVV~-I%r$~fD@Ok zkhbQCf?GME81hGxmE_CNB4Gw4BGf&67SauZSM;xSDH-y$`Z3ZK6q;0x5`p6VVVqM1 z^!b$*097hOQWVd8OksCslO>1?pfWY2)K3aoHV9PNAz&Zyk=_q2i<;J`wWL<>NVezj zH`dR_mftihm>>|vtWbq|0=Fg2w2MKEHtr0p|6tr-lR7q6X6?}*i-zaH{=p5R!|f+* ztex~_%8+ zb(ToN?q(f)PbrJE8X9U6ZKlnRX{h?Hy7r_4$X2*3YF?!n>8sh6 
ztLLOBi%EAxBvTQvAhs*zS5r$rVWJoi})vGoQm zh#tekCfGJq()eSHV#HI^1Xl+o|e6kqS;ZqK3Q7l1m&vuh8#w zvFCbQrHYAJ6TUmf5Zdcdm0HKTJk2ZhPOOtrQwn1PGDy|BZ7T>tHE4s%2|N%TEPa~Q zo!klZid-_-AqVU5nNB?Hu5{IyvjEz$?R7o%B7zr@tl~~$6y>f0L~06hF*z0VA0fhb z;z-rva8W)M;s_A}cE)_opsN-c;W? zn57VOR=fm?vXTq)mmA{*y$~YD z3J~9)NmpcQ6B2F+wuF_>yTgi?;CyNoh)MTVLC&47L*e#FW#P8mSPFh5{Q)a82E~S* zR($P~L}CAGZ9=nW3R!V-q3k(tUIL+=dVrZoRw+|C;LsWC>aXS^D-J%`dMVY$S0J&W zgR7rV>v6|AC^aF6pOnkwh!dlyeoDgwX0_)2VIzbrc;q|?MJyKj6Lp!a4iIUOqK8e? zXG`<8G5DZ_7!~mrx)8?R_?b1E=W(E9$Rwn2_E@uo4+B1?R^IlsOP zQD!Ce*iKTcK_Z7Z>g`*&^(7i0w5|$axtiWb&1)3jI09aW1!dpEqIuquK6I^~@iJ6c z`|6P0)#eTLE*i$FRrN@sw?76JpS=Phs__doU?ue?hfHXaYHKtL(s87cw~hE#^$JJb za>J?snjn>}2L(EGrL^FK)fa%_H1d-!tJ>G$JWB~4AnLvvf`(;gvw4F}0tXh?^m4k~ ztBDDU99Ge?uw@wp;q(St%Ag(9&FUQokw2GjLu(A#*&dzdpJXksUnZi*8J!Z6#T>LE zv$z5mN?A2T03Xf~57Kl4a`iwYSwbF!aisT{P<%6zQwA-4=r{9*%;TMX@`{c|Xz7H6 z&!)f$FsYDmqbNO#3BE$=5G$FAVBh9F*6ftJoY5x1t<{L*98f`iR2u8NbgMq9KxIpF z)i4V_Yf!3XXjx?+e%o-MadwN(A84hSh(H(x`4w7=xP?ydthhg1wRVzMBeAdc&&q-n z5|ivp#Hd8FDqfqge7h|vF$2aPdPvXE%YC(rGM=J4pJzc|-$8G}bs3FM1z(^FU3%C% zdPgF-5a`D0RFv{7JFX25-N(WPfZ7Sx1*v1-#2^x2B=WTXHvr2PN{;VhLa;0jS6 z>Y+PiSlQT4Lh48y$MA)V!{Kx7L6PeJGaKJ*wEngA53RpyeZTc5t?#scuk~B4Uu_+< z%2vPie(P@Q3$635kF}m|H8=iqkCMu##|W`|32#hwB5*W?$y?)t)<9>vN)dZaVaONN}~siR#N! zoyM+vuHwiy&-9pSvukIt)~sFYaL2`IcusP?9WJABL4R;H=KfcvdK&1|&vAl$ep)|4 zP^>@Tx(gbP2QK}}S6K|l%f5W_D;&0O>cIkg>WIf*nm=n(*~2YsUuLRH(=?v&kkQ)v z_JNK@S+do6=Cpn~ZT(&Q)S(GQ|L)W~wvPGptj)I>{le5*=O=fU z8&98`W_ueuoKJIfuFk!>$1B@*#7)m;^({8UUU?W|?H)6ppVG&rak}=VeaO8I5B>|K Cq{@H* diff --git a/data/raw/f_724_simon.py b/data/raw/f_724_simon.py deleted file mode 100644 index 1f14b851..00000000 --- a/data/raw/f_724_simon.py +++ /dev/null @@ -1,253 +0,0 @@ -import sqlite3 -import pandas as pd -import os - - -def f_724(db_file, table_name, column_name, pattern='\d+[xX]'): - """ - Find all matches with a regex pattern in a list of strings in an SQL database. - - The function loads an sql database and selects all entries from the specified - table. All entries of the specified column are matched against a regex pattern. - Matches are returned in a DataFrame. - - Parameters: - db_file (str): The SQLite database file. - table_name (str): The name of the table to search. - column_name (str): The name of the column to search. - pattern (str, optional): The regex pattern to search for. Defaults to '\d+[xX]'. - - Returns: - DataFrame: A pandas DataFrame with the matches. - - Raises: - ValueError: If db_file does not exist. - - Requirements: - - sqlite3 - - pandas - - os - - Example: - >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column') - >>> print(result.head(10)) - id test_column - 0 1 4x4 car - 1 2 New 3x3 puzzle - 3 4 Product with 5X feature - 55 56 1xsafe - 56 57 3xmother - 57 58 5xenjoy - 58 59 2xhome - 59 60 3xanswer - 60 61 5xgirl - 61 62 5xkind - - >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column', pattern='kind') - >>> print(result) - id test_column - 20 21 To between successful ever ago PM toward today... - 42 43 Entire manage wife management perform size def... 
- 61 62 5xkind - """ - - if not os.path.isfile(db_file): - raise ValueError('db_file does not exist.') - - conn = sqlite3.connect(db_file) - df = pd.read_sql_query(f"SELECT * FROM {table_name}", conn) - - if df[column_name].dtype == 'object': # Check if the column data type is a string - matches = df[df[column_name].str.contains(pattern)] - else: - matches = pd.DataFrame(columns=df.columns) # Return an empty DataFrame - - return matches - -import unittest -import pandas as pd -import os -import os - -class TestCases(unittest.TestCase): - - def setUp(self) -> None: - self.db_path = os.path.join('f_724_data_simon', 'sample.db') - self.empty_path = os.path.join('f_724_data_simon', 'empty.db') - - def test_case_1(self): - # Input: Database with known data, table name, and column name - result = f_724(self.db_path, 'test_table', 'test_column') - excpected = pd.DataFrame( - {'id': {0: 1, - 1: 2, - 3: 4, - 55: 56, - 56: 57, - 57: 58, - 58: 59, - 59: 60, - 60: 61, - 61: 62, - 62: 63, - 63: 64, - 64: 65, - 65: 66, - 66: 67, - 67: 68, - 68: 69, - 69: 70, - 70: 71, - 71: 72, - 72: 73, - 73: 74, - 74: 75, - 75: 76, - 76: 77, - 77: 78, - 78: 79, - 79: 80, - 80: 81, - 81: 82, - 82: 83, - 83: 84, - 84: 85, - 85: 86, - 86: 87, - 87: 88, - 88: 89, - 89: 90, - 90: 91, - 91: 92, - 92: 93, - 93: 94, - 94: 95, - 95: 96, - 96: 97, - 97: 98, - 98: 99, - 99: 100, - 100: 101, - 101: 102, - 102: 103, - 103: 104, - 104: 105}, - 'test_column': {0: '4x4 car', - 1: 'New 3x3 puzzle', - 3: 'Product with 5X feature', - 55: '1xsafe', - 56: '3xmother', - 57: '5xenjoy', - 58: '2xhome', - 59: '3xanswer', - 60: '5xgirl', - 61: '5xkind', - 62: '4xsituation', - 63: '2xsimilar', - 64: '1xloss', - 65: '4xdifficult', - 66: '7xsea', - 67: '1xpeace', - 68: '1xyourself', - 69: '3xoccur', - 70: '2xbill', - 71: '7xmore', - 72: '4xeye', - 73: '4xgoal', - 74: '7xwhite', - 75: '7xsix', - 76: '9xcourt', - 77: '2xhave', - 78: '2xremain', - 79: '2xbehind', - 80: '5xdeep', - 81: '7xold', - 82: '7xstatement', - 83: '1xok', - 84: '4xsport', - 85: '2xtraining', - 86: '3xclearly', - 87: '3xsupport', - 88: '8xrecord', - 89: '7xpretty', - 90: '7xlawyer', - 91: '5ximage', - 92: '3xperformance', - 93: '1xyet', - 94: '4xproduce', - 95: '2xpolitics', - 96: '8xback', - 97: '1xpartner', - 98: '5xduring', - 99: '5xcoach', - 100: '1xproject', - 101: '6xgirl', - 102: '1xmember', - 103: '6xkitchen', - 104: '9xarm'}} - ) - - # Check if the result is a DataFrame - self.assertIsInstance(result, pd.DataFrame) - - # Check if the returned DataFrame has more matches than the initially inserted ones - # Since we added 50 more entries that match the regex pattern - self.assertEqual(len(result), 53) - pd.testing.assert_frame_equal(result, excpected) - - def test_case_2(self): - # Test with a column that has no matches - result = f_724(self.db_path, 'test_table', 'id') - self.assertEqual(len(result), 0) - - def test_case_3(self): - # Test with a non-existent table - with self.assertRaises(Exception): - f_724(self.db_path, 'non_existent_table', 'test_column') - - def test_case_4(self): - # Test with a non-existent column - with self.assertRaises(Exception): - f_724(self.db_path, 'test_table', 'non_existent_column') - - def test_case_5(self): - # Test with a database that has no tables (we'll create an empty db for this) - empty_db_path = os.path.join("f_724_data_simon", "empty.db") - - with self.assertRaises(Exception): - f_724(self.empty_path, 'test_table', 'test_column') - - def test_case_6(self): - # Test with a non-existent database file - db_path = 
os.path.join("f_724_data_simon", "non_existent.db") - - with self.assertRaises(Exception): - f_724(db_path, 'test_table', 'test_column') - - def test_case_7(self): - # Test with a different regex pattern - # Here, we are trying to match any word characters (alphanumeric characters plus underscore) - result = f_724(self.db_path, 'test_table', 'test_column', pattern='ab') - expected = pd.DataFrame( - {'id': {14: 15, 15: 16, 21: 22, 30: 31}, - 'test_column': {14: 'Hand soon trip sing. Plan cost tell spend that quality. After listen help summer likely able.', - 15: 'War not century support. That send other president everybody yourself necessary.\nLess rather recent study dream buy Democrat. Environmental game thus first about air reality left.', - 21: 'Feel establish would our each. Even cup college decide alone free dark product. Full great during avoid approach.', - 30: 'Store foot near interest husband. View several position write keep guy wrong. Degree either blue plan up table beat.\nFire while less section. My save cost nation paper affect between.'}} - ) - # Check if the result is a DataFrame - self.assertIsInstance(result, pd.DataFrame) - - # Check if the returned DataFrame has matches - self.assertEqual(len(result), 4) - pd.testing.assert_frame_equal(result, expected) - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - - -if __name__ == "__main__": - run_tests() \ No newline at end of file diff --git a/data/raw/f_724_simon_chien_edit.py b/data/raw/f_724_simon_chien_edit.py new file mode 100644 index 00000000..fede53cc --- /dev/null +++ b/data/raw/f_724_simon_chien_edit.py @@ -0,0 +1,139 @@ +import sqlite3 +import pandas as pd +import os + + +def f_724(db_file, table_name, column_name, pattern='\d+[xX]'): + """ + Find all matches with a regex pattern in a list of strings in an SQL database. + + The function loads an sql database and selects all entries from the specified + table. All entries of the specified column are matched against a regex pattern. + Matches are returned in a DataFrame. + + Parameters: + db_file (str): The SQLite database file. + table_name (str): The name of the table to search. + column_name (str): The name of the column to search. + pattern (str, optional): The regex pattern to search for. Defaults to '\d+[xX]'. + + Returns: + DataFrame: A pandas DataFrame with the matches. + + Raises: + ValueError: If db_file does not exist. 
+
+    Requirements:
+    - sqlite3
+    - pandas
+    - os
+
+    Example:
+    >>> result = f_724('f_724_data_simon/sample.db', 'test_table', 'test_column')
+    >>> print(result.head(10))
+        id              test_column
+    0    1                  4x4 car
+    1    2           New 3x3 puzzle
+    3    4  Product with 5X feature
+    55  56                   1xsafe
+    56  57                 3xmother
+    57  58                  5xenjoy
+    58  59                   2xhome
+    59  60                 3xanswer
+    60  61                   5xgirl
+    61  62                   5xkind
+    """
+
+    if not os.path.isfile(db_file):
+        raise ValueError('db_file does not exist.')
+
+    conn = sqlite3.connect(db_file)
+    df = pd.read_sql_query(f"SELECT * FROM {table_name}", conn)
+
+    if df[column_name].dtype == 'object':  # Check if the column data type is a string
+        matches = df[df[column_name].str.contains(pattern)]
+    else:
+        matches = pd.DataFrame(columns=df.columns)  # Return an empty DataFrame
+
+    return matches
+
+
+import unittest
+import sqlite3
+import pandas as pd
+import os
+import tempfile
+
+
+class TestCases(unittest.TestCase):
+
+    def setUp(self):
+        # Create a temporary directory to hold the database
+        self.test_dir = tempfile.mkdtemp()
+        self.db_path = os.path.join(self.test_dir, "test.db")
+
+        # Set up a new database and populate it with initial data
+        self.conn = sqlite3.connect(self.db_path)
+        self.conn.execute("CREATE TABLE test_table (id INTEGER PRIMARY KEY, test_column TEXT)")
+        data = [
+            (1, "4x4 car"),
+            (2, "New 3x3 puzzle"),
+            (3, "Product with 5X feature"),
+            (4, "1xsafe"),
+            (5, "3xmother")
+        ]
+        self.conn.executemany("INSERT INTO test_table (id, test_column) VALUES (?, ?)", data)
+        self.conn.commit()
+
+    def tearDown(self):
+        # Close the connection and remove the temporary directory
+        self.conn.close()
+        os.remove(self.db_path)
+        os.rmdir(self.test_dir)
+
+    def test_regular_expression_match(self):
+        # Test case with known data and expected matches
+        result = f_724(self.db_path, 'test_table', 'test_column')
+        expected = pd.DataFrame({
+            'id': [1, 2, 3, 4, 5],
+            'test_column': ['4x4 car', 'New 3x3 puzzle', 'Product with 5X feature', '1xsafe', '3xmother']
+        }, index=[0, 1, 2, 3, 4])
+        pd.testing.assert_frame_equal(result, expected)
+
+    def test_no_matches(self):
+        # Test case where no entries match the pattern
+        result = f_724(self.db_path, 'test_table', 'test_column', pattern='abc')
+        self.assertTrue(result.empty)
+
+    def test_non_existent_table(self):
+        # pandas wraps sqlite3's OperationalError for a missing table in its own DatabaseError
+        with self.assertRaises(pd.errors.DatabaseError):
+            f_724(self.db_path, 'fake_table', 'test_column')
+
+    def test_non_existent_column(self):
+        # Indexing the loaded DataFrame with a missing column raises KeyError
+        with self.assertRaises(KeyError):
+            f_724(self.db_path, 'test_table', 'fake_column')
+
+    def test_different_pattern(self):
+        # Test case with a different pattern
+        self.conn.execute("INSERT INTO test_table (id, test_column) VALUES (?, ?)", (6, "something 1ab2x"))
+        self.conn.commit()
+        result = f_724(self.db_path, 'test_table', 'test_column', pattern='1ab2x')
+        result.reset_index(drop=True, inplace=True)  # Resetting index before comparison
+        expected = pd.DataFrame({
+            'id': [6],
+            'test_column': ['something 1ab2x']
+        }, index=[0])
+        pd.testing.assert_frame_equal(result, expected)
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+if __name__ == "__main__":
+    run_tests()
diff --git a/data/raw/f_725_data_simon/complex_data.csv b/data/raw/f_725_data_simon/complex_data.csv
deleted file mode 100644
index 68e3e529..00000000
--- a/data/raw/f_725_data_simon/complex_data.csv
+++ /dev/null
@@ -1,1001 +0,0
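One caveat about the f_724 implementation kept above: the table name is interpolated into the query with an f-string, which pandas cannot parameterize, so a missing table surfaces as the wrapped DatabaseError the new tests expect, and untrusted names are never escaped. A possible guard, sketched under the assumption that validating against sqlite_master is acceptable (this helper is illustrative, not part of f_724):

import sqlite3

def table_exists(db_file, table_name):
    # Bound parameters work here because the name is compared as data, not as SQL.
    with sqlite3.connect(db_file) as conn:
        row = conn.execute(
            "SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?",
            (table_name,),
        ).fetchone()
    return row is not None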
@@ -A,B,C -2,48,895 -2,26,903 -6,13,899 -5,10,899 -8,1,905 -10,32,898 -9,1,895 -9,36,903 -13,11,901 -13,38,897 -12,48,903 -11,24,898 -16,26,903 -13,37,905 -17,16,895 -18,38,897 -20,43,897 -22,43,905 -22,3,897 -22,32,901 -24,18,895 -23,13,905 -26,49,903 -24,7,900 -28,27,901 -28,39,904 -29,46,899 -32,22,895 -33,41,905 -32,23,899 -30,19,898 -32,48,895 -36,44,898 -37,47,898 -39,49,895 -35,17,900 -40,39,898 -41,29,895 -43,6,902 -44,13,898 -45,48,898 -43,46,901 -43,42,900 -44,3,902 -49,22,900 -46,37,903 -48,15,896 -50,17,905 -48,18,904 -54,10,897 -52,15,902 -52,42,902 -54,21,904 -57,29,897 -57,39,902 -58,13,895 -59,16,900 -62,46,900 -58,30,900 -64,14,905 -62,45,899 -64,41,901 -62,10,905 -68,26,905 -65,47,901 -66,11,902 -66,7,898 -67,40,895 -71,40,901 -70,19,896 -73,4,901 -75,14,898 -76,33,895 -75,42,903 -76,7,903 -77,29,899 -76,30,898 -82,5,903 -82,49,902 -81,46,895 -80,15,903 -85,7,903 -87,26,896 -85,16,905 -89,42,899 -90,22,895 -87,28,897 -89,39,895 -89,9,899 -93,8,897 -94,36,901 -95,35,896 -95,10,901 -96,25,901 -97,37,904 -100,19,898 -98,2,905 -99,4,895 -103,44,903 -103,47,901 -103,32,899 -101,7,904 -102,4,904 -106,39,901 -104,41,896 -108,13,902 -109,9,900 -108,49,899 -111,25,898 -110,28,896 -111,23,900 -116,42,895 -112,25,905 -115,39,904 -115,48,904 -119,30,903 -120,25,902 -117,39,898 -123,18,897 -122,3,900 -120,22,901 -124,26,897 -125,32,899 -124,10,900 -127,34,896 -128,1,896 -130,22,895 -127,8,901 -130,28,901 -129,3,898 -130,24,902 -135,26,896 -137,17,897 -136,39,898 -134,43,896 -139,5,895 -138,31,903 -138,30,904 -138,11,905 -142,3,899 -145,45,895 -146,31,903 -147,39,902 -143,1,896 -144,33,903 -150,39,905 -150,27,900 -149,19,895 -148,24,904 -152,38,895 -152,26,897 -152,24,902 -157,7,904 -158,41,903 -159,33,905 -156,49,902 -161,49,897 -158,34,898 -161,38,899 -164,13,897 -163,44,904 -165,3,896 -167,49,901 -164,40,905 -169,4,897 -167,0,898 -171,29,897 -167,24,899 -170,15,905 -172,1,900 -171,34,903 -172,44,899 -172,16,900 -174,47,895 -176,8,904 -180,12,898 -178,26,898 -180,30,899 -178,19,898 -182,8,902 -181,6,903 -181,2,897 -184,23,901 -188,41,905 -187,46,897 -188,26,904 -188,17,895 -191,13,900 -189,19,896 -193,45,898 -193,21,899 -192,34,904 -192,27,897 -198,26,898 -199,23,903 -199,49,902 -197,13,900 -202,14,900 -198,17,898 -199,4,903 -200,34,902 -202,48,898 -204,22,902 -205,39,895 -206,9,901 -210,27,905 -208,40,897 -210,33,902 -208,48,897 -213,22,902 -215,48,903 -215,21,902 -214,4,905 -214,15,896 -216,30,903 -216,18,904 -218,23,898 -217,45,895 -218,0,895 -223,14,895 -221,24,903 -222,36,903 -227,38,905 -228,13,902 -225,28,896 -229,3,896 -227,13,899 -232,43,904 -230,49,895 -230,36,900 -234,19,903 -233,39,904 -232,44,896 -236,31,901 -237,41,895 -240,13,900 -240,6,896 -238,1,903 -240,21,901 -244,49,905 -243,43,903 -241,8,899 -247,37,902 -243,16,901 -245,45,905 -245,9,900 -246,4,898 -247,15,903 -250,26,901 -253,26,901 -250,34,902 -252,23,901 -256,36,901 -256,44,905 -259,28,896 -256,22,897 -260,36,898 -260,17,898 -263,29,904 -261,24,905 -264,28,897 -261,4,900 -265,28,897 -266,49,897 -266,46,895 -270,15,903 -271,40,903 -271,40,901 -268,33,905 -271,7,905 -271,9,899 -273,43,895 -275,24,901 -277,22,905 -278,32,900 -277,17,895 -279,3,904 -280,20,900 -282,44,905 -281,21,905 -284,8,902 -284,32,896 -285,48,900 -286,48,897 -287,21,897 -290,4,901 -287,3,897 -292,2,902 -288,44,904 -290,40,900 -295,7,899 -294,32,902 -292,3,896 -298,9,896 -297,10,898 -295,18,905 -297,31,898 -301,48,897 -302,2,903 -299,19,899 -304,24,900 -303,18,895 -307,22,904 -305,17,896 -304,3,901 -305,29,904 -308,22,905 -309,48,900 
-310,37,899 -310,19,901 -314,6,902 -311,45,903 -313,14,902 -317,47,905 -315,49,898 -318,2,899 -319,36,905 -318,1,896 -323,46,905 -322,17,904 -324,17,898 -326,39,905 -325,5,897 -326,1,897 -325,32,896 -325,14,895 -328,28,903 -329,34,901 -333,41,902 -333,13,895 -333,4,901 -332,32,902 -337,20,903 -334,36,898 -339,40,904 -336,26,903 -340,20,899 -337,16,899 -342,27,897 -342,24,895 -341,10,895 -344,0,904 -345,44,902 -344,36,896 -347,43,897 -348,14,896 -349,25,901 -348,8,901 -349,15,899 -353,12,904 -355,14,904 -351,28,899 -357,23,897 -353,45,904 -358,31,905 -357,1,895 -357,34,898 -359,15,899 -359,35,898 -363,36,898 -365,3,897 -365,1,901 -364,19,900 -367,27,905 -367,17,899 -370,12,902 -368,0,902 -368,34,897 -371,25,902 -373,28,898 -373,49,904 -373,36,895 -372,33,903 -378,9,903 -377,16,899 -377,45,902 -380,1,903 -382,2,896 -378,8,900 -383,31,900 -384,8,899 -384,38,900 -383,27,898 -383,26,900 -389,37,902 -390,23,905 -391,37,899 -387,45,901 -391,29,896 -390,22,899 -390,31,901 -393,47,895 -394,24,905 -395,0,903 -394,7,902 -399,34,897 -401,22,900 -398,34,898 -403,30,901 -403,17,903 -404,45,899 -402,23,902 -407,29,897 -405,33,895 -408,33,901 -408,3,901 -408,43,897 -408,19,899 -413,18,899 -412,7,899 -413,0,896 -415,0,903 -416,23,905 -413,8,903 -419,17,904 -419,11,902 -419,49,905 -419,22,902 -418,18,904 -420,49,905 -420,3,903 -426,39,900 -427,37,900 -426,39,901 -428,2,897 -428,36,903 -429,30,904 -432,41,902 -433,16,895 -430,42,904 -430,29,905 -433,0,904 -434,14,904 -436,26,901 -436,41,904 -438,14,901 -436,15,896 -442,34,904 -439,17,902 -444,45,903 -444,21,904 -445,42,903 -446,48,899 -445,23,903 -445,46,901 -448,37,895 -447,30,905 -450,48,899 -449,1,901 -449,20,900 -455,2,895 -452,11,903 -455,18,905 -455,40,899 -455,37,900 -456,19,898 -459,17,903 -461,29,899 -462,5,904 -464,23,905 -462,47,904 -461,20,902 -464,15,896 -465,32,901 -469,49,902 -468,49,896 -466,39,903 -470,29,895 -471,10,895 -469,39,903 -475,25,902 -474,30,897 -476,43,897 -475,12,903 -476,20,901 -477,24,897 -479,28,895 -477,43,899 -483,34,905 -481,2,903 -482,22,898 -486,25,900 -486,47,900 -483,22,897 -485,7,904 -486,1,901 -486,39,895 -492,37,899 -488,19,896 -493,35,905 -494,31,904 -493,42,900 -495,8,897 -495,25,903 -495,43,899 -500,42,897 -496,1,904 -497,0,896 -499,25,899 -501,14,901 -145,71,901 -121,63,901 -124,71,900 -12,53,895 -140,77,901 -164,55,901 -199,87,896 -114,95,899 -32,73,896 -78,96,900 -82,90,905 -86,53,896 -144,59,902 -125,100,899 -34,83,902 -61,87,902 -155,84,905 -129,96,905 -151,69,900 -152,52,898 -47,53,905 -9,82,896 -168,74,898 -156,71,895 -16,60,903 -148,100,895 -176,67,903 -194,59,902 -189,84,900 -73,55,901 -35,57,895 -76,76,901 -119,86,897 -196,100,900 -29,59,901 -45,62,895 -109,99,905 -121,62,905 -185,79,898 -41,99,903 -18,52,896 -124,93,898 -123,100,905 -173,54,903 -3,99,903 -11,61,895 -187,64,896 -109,80,900 -99,94,896 -143,100,896 -118,62,896 -23,53,896 -134,75,899 -140,65,904 -128,51,900 -117,82,901 -63,92,903 -188,85,896 -149,71,903 -199,85,905 -132,61,902 -75,97,900 -69,69,905 -107,89,895 -39,54,902 -102,61,895 -104,88,902 -185,85,905 -152,82,901 -109,60,905 -1,92,905 -176,57,903 -78,72,902 -17,94,897 -3,74,899 -189,74,903 -70,65,900 -167,73,897 -70,60,904 -124,56,905 -40,87,902 -191,99,900 -101,63,898 -63,57,896 -181,77,900 -131,76,903 -109,72,903 -130,89,905 -185,54,897 -119,76,899 -18,55,897 -84,77,898 -145,69,900 -180,71,904 -3,55,901 -138,69,901 -181,78,902 -177,83,895 -179,61,895 -174,75,904 -124,53,896 -52,82,901 -92,96,902 -197,55,895 -34,55,905 -73,90,901 -82,83,905 -104,98,902 -9,58,903 -49,72,901 
-154,87,895 -84,68,900 -160,62,905 -148,94,898 -157,79,900 -138,76,898 -169,56,905 -160,61,900 -186,68,903 -27,81,901 -118,77,896 -26,93,900 -22,90,904 -172,64,898 -131,56,898 -140,86,896 -87,60,902 -12,70,899 -13,58,905 -89,69,905 -144,71,897 -18,52,902 -46,77,897 -94,71,905 -162,61,900 -20,75,904 -109,70,901 -164,82,900 -118,92,901 -160,94,895 -115,51,901 -19,75,902 -67,72,900 -63,73,902 -198,80,896 -125,79,899 -110,88,903 -66,100,899 -121,81,902 -66,76,895 -88,66,899 -146,60,895 -134,71,904 -120,51,897 -19,65,900 -117,55,903 -144,73,896 -173,53,896 -71,54,895 -199,68,904 -39,76,897 -149,76,896 -154,89,895 -162,73,905 -118,95,905 -46,98,895 -93,87,898 -158,77,902 -61,60,899 -67,93,898 -90,72,896 -82,65,897 -107,79,895 -43,89,903 -128,78,898 -123,51,900 -96,55,902 -175,68,902 -138,84,898 -94,71,902 -70,86,899 -92,76,902 -198,78,896 -96,56,896 -29,79,904 -9,56,903 -158,57,901 -78,57,904 -80,99,897 -102,88,902 -147,60,896 -173,94,904 -156,99,900 -45,56,897 -183,72,905 -200,51,898 -96,87,900 -186,89,905 -160,61,901 -27,88,896 -119,53,904 -107,94,898 -143,73,903 -118,76,902 -60,58,895 -136,54,904 -146,73,905 -34,53,898 -5,75,904 -190,85,901 -115,97,903 -75,73,895 -106,88,900 -59,56,901 -122,71,902 -175,99,895 -142,92,898 -145,99,901 -80,95,900 -90,77,898 -86,61,900 -2,78,905 -35,74,904 -166,58,896 -143,99,905 -90,56,895 -133,54,905 -173,65,898 -166,68,901 -183,53,895 -0,71,900 -114,87,904 -197,99,902 -162,61,895 -68,93,895 -151,92,896 -140,79,900 -89,58,904 -175,90,901 -196,99,900 -127,68,904 -10,78,902 -22,57,899 -124,56,895 -169,79,902 -70,90,896 -179,92,898 -181,61,898 -34,87,903 -153,98,904 -60,57,898 -4,77,899 -176,70,905 -103,82,895 -54,63,903 -120,73,904 -187,77,901 -37,94,896 -12,91,898 -132,88,902 -199,75,898 -45,84,900 -37,90,905 -110,53,897 -34,83,903 -185,75,900 -67,93,895 -26,71,900 -16,77,899 -22,78,896 -10,51,899 -160,95,898 -6,66,905 -36,90,899 -185,67,904 -77,90,898 -103,69,904 -136,80,904 -144,93,900 -116,57,901 -113,83,905 -169,100,904 -28,76,903 -103,67,896 -186,71,904 -154,63,895 -29,81,901 -167,98,903 -169,79,903 -21,64,898 -3,58,901 -109,91,903 -131,53,896 -124,76,900 -170,99,903 -80,92,895 -142,77,895 -142,77,904 -152,55,901 -183,73,903 -7,92,901 -44,77,903 -164,79,898 -3,55,895 -182,73,896 -68,80,900 -162,51,903 -112,73,896 -78,69,897 -150,82,905 -78,68,903 -107,77,898 -122,96,902 -128,68,897 -131,63,895 -45,51,896 -6,95,905 -43,69,897 -68,89,904 -9,64,904 -112,91,905 -133,55,897 -27,88,903 -99,51,895 -199,55,895 -83,59,903 -127,76,896 -128,92,903 -67,70,896 -44,87,896 -96,78,901 -121,84,899 -60,65,905 -118,84,903 -68,96,903 -143,72,899 -105,74,900 -101,67,901 -79,88,899 -64,55,901 -120,78,905 -106,68,898 -193,97,902 -45,51,902 -67,82,899 -65,89,899 -176,77,895 -161,65,899 -56,77,902 -162,71,895 -120,69,905 -30,77,895 -12,90,901 -12,54,895 -41,55,901 -118,81,896 -51,96,905 -14,71,899 -178,63,904 -175,79,900 -95,59,904 -21,84,895 -21,57,905 -171,51,903 -2,63,904 -173,54,902 -75,61,895 -19,66,900 -154,69,896 -120,77,897 -154,59,897 -147,64,900 -60,87,902 -93,96,896 -16,78,905 -157,84,897 -9,88,905 -143,70,896 -8,54,895 -173,90,904 -51,72,899 -155,76,896 -43,59,905 -50,85,900 -116,94,895 -180,88,896 -190,85,904 -168,74,905 -82,89,900 -114,62,897 -165,75,902 -10,58,899 -193,79,896 -10,76,899 -192,58,900 -182,97,901 -27,75,905 -141,53,898 -25,79,901 -52,96,902 -176,52,902 -94,82,899 -113,91,899 -57,84,903 -53,71,895 -25,95,899 -63,66,905 -23,67,904 -187,65,895 -25,96,905 -197,96,905 -180,55,897 -174,94,897 -6,94,900 -130,96,899 -110,82,905 -75,75,898 -107,69,896 
-113,82,905 -32,90,899 -111,58,897 -107,86,899 -21,71,903 -0,92,897 -172,83,900 -44,61,904 -128,94,901 -127,92,895 -126,98,899 -130,70,895 -48,70,901 -127,77,895 -40,51,898 -29,71,895 -123,91,900 -140,65,898 -164,64,901 -48,78,899 -39,76,897 -33,72,900 -56,78,897 -67,56,904 -177,91,895 -77,51,898 -116,75,904 -132,85,903 -157,85,895 -65,99,901 -58,64,899 -51,91,903 -78,51,895 -4,58,905 -80,65,903 -9,54,895 -159,85,898 -105,75,904 -195,70,902 -80,53,905 -16,54,896 -93,61,904 -133,69,895 -103,98,901 -11,85,904 -3,75,899 -171,90,905 -122,99,904 -166,64,903 -98,96,904 -131,76,905 -70,70,905 -145,86,903 -9,64,905 -179,75,899 -106,90,900 -63,100,903 -38,75,897 -170,83,904 -54,79,901 -90,82,905 -120,94,898 -49,55,903 -191,57,905 -54,51,898 -74,66,902 -141,63,905 -139,75,898 -179,85,896 -136,96,898 -200,84,900 -136,71,895 -144,80,895 -124,53,897 -197,96,897 -93,76,904 -72,79,903 -78,54,900 -82,75,896 -3,83,897 -46,83,904 -82,65,905 diff --git a/data/raw/f_725_simon.py b/data/raw/f_725_simon_chien_edit.py similarity index 52% rename from data/raw/f_725_simon.py rename to data/raw/f_725_simon_chien_edit.py index 03745b98..301e2daa 100644 --- a/data/raw/f_725_simon.py +++ b/data/raw/f_725_simon_chien_edit.py @@ -1,131 +1,114 @@ -import pandas as pd -from statsmodels.tsa.stattools import adfuller - - -def f_725(df: pd.DataFrame, column_a: str, column_b: str, column_c: str) -> bool: - ''' - Determines if a specific subset of data is stationary. - - Functionality: - 1. Filters rows where column_b > 50 and column_c == 900. - 2. Checks if the resulting subset of data in column_a is stationary using the Augmented Dickey-Fuller test. - 3. Returns True if the data is stationary, False otherwise. - - Data is considered to be stationary if the p_value returned by the - Augmented Dickey-Fuller test is smaller than 0.05. - - If column_a is empty after filtering or if its values are constant, True - is returned. - - Parameters: - df (pd.DataFrame): A DataFrame containing the data. - column_a (str): The name of the column to test for stationarity. - column_b (str): The name of the column used for filtering based on its value being greater than 50. - column_c (str): The name of the column used for filtering based on its value being equal to 900. - - Output: - bool: True if the data in column_a (after filtering based on column_b and column_c) is stationary, False otherwise. - - Requirements: - pandas - statsmodels: for using the adfuller test - - Example: - >>> df = pd.DataFrame({ - ... 'A': [1, 2, 3, 4, 5, 6], - ... 'B': [60, 70, 80, 90, 100, 110], - ... 'C': [900, 900, 900, 900, 900, 900] - ... }) - >>> f_725(df, 'A', 'B', 'C') - False - - >>> df = pd.DataFrame({ - ... 'TempA': [], - ... 'TempB': [], - ... 'TempC': [] - ... 
}) - >>> f_725(df, 'TempA', 'TempB', 'TempC') - True - ''' - # Filter rows based on column_b and column_c - filtered_df = df[(df[column_b] > 50) & (df[column_c] == 900)] - - if filtered_df[column_a].nunique() <= 1: - return True - - # If dataframe is empty after filtering, return False - if filtered_df.empty: - return True - - # Perform Augmented Dickey-Fuller test - adf_result = adfuller(filtered_df[column_a]) - p_value = adf_result[1] - return p_value <= 0.05 - - -import unittest -import os -import pandas as pd - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - - -class TestCases(unittest.TestCase): - - def setUp(self) -> None: - self.csv_path = os.path.join('f_725_data_simon', 'complex_data.csv') - - def test_case_1(self): - df = pd.read_csv(self.csv_path) - df = df.rename(columns={'A': 'X', 'B': 'Y', 'C': 'Z'}) - df_sliced = df.iloc[:500] - result = f_725(df_sliced, 'X', 'Y', 'Z') - self.assertTrue(result) - - def test_case_2(self): - df = pd.read_csv(self.csv_path) - df = df.rename(columns={'A': 'Alpha', 'B': 'Beta', 'C': 'Gamma'}) - df_sliced = df.iloc[500:] - result = f_725(df_sliced, 'Alpha', 'Beta', 'Gamma') - self.assertTrue(result) - - def test_case_3(self): - df = pd.read_csv(self.csv_path) - df = df.rename(columns={'A': 'Col1', 'B': 'Col2', 'C': 'Col3'}) - df['Col1'] = 50 - result = f_725(df, 'Col1', 'Col2', 'Col3') - self.assertTrue(result) - - def test_case_4(self): - df = pd.read_csv(self.csv_path) - df = df.rename(columns={'A': 'Data1', 'B': 'Data2', 'C': 'Data3'}) - df['Data2'] = 40 - df['Data3'] = 910 - result = f_725(df, 'Data1', 'Data2', 'Data3') - self.assertTrue(result) - - def test_case_5(self): - df = pd.DataFrame({ - 'TempA': [], - 'TempB': [], - 'TempC': [] - }) - result = f_725(df, 'TempA', 'TempB', 'TempC') - self.assertTrue(result) - - def test_case_6(self): - df = pd.read_csv(self.csv_path) - df = df.rename(columns={'A': 'Data1', 'B': 'Data2', 'C': 'Data3'}) - df['Data2'] = 80 - df['Data3'] = 900 - df['Data1'] = [5*x**5 for x in range(len(df['Data1']))] - result = f_725(df, 'Data1', 'Data2', 'Data3') - self.assertFalse(result) - - -if __name__ == '__main__': - run_tests() +import pandas as pd +from statsmodels.tsa.stattools import adfuller + + +def f_725(df: pd.DataFrame, column_a: str, column_b: str, column_c: str) -> bool: + """ + Determines if a specific subset of data is stationary. + + Functionality: + 1. Filters rows where column_b > 50 and column_c == 900. + 2. Checks if the resulting subset of data in column_a is stationary using the Augmented Dickey-Fuller test. + 3. Returns True if the data is stationary, False otherwise. + + Data is considered to be stationary if the p_value returned by the + Augmented Dickey-Fuller test is smaller than 0.05. + + If column_a is empty after filtering or if its values are constant, True + is returned. + + Parameters: + df (pd.DataFrame): A DataFrame containing the data. + column_a (str): The name of the column to test for stationarity. + column_b (str): The name of the column used for filtering based on its value being greater than 50. + column_c (str): The name of the column used for filtering based on its value being equal to 900. + + Output: + bool: True if the data in column_a (after filtering based on column_b and column_c) is stationary, False otherwise. + + Requirements: + pandas + statsmodels: for using the adfuller test + + Example: + >>> df = pd.DataFrame({ + ... 'A': [1, 2, 3, 4, 5, 6], + ... 
'B': [60, 70, 80, 90, 100, 110],
+    ...     'C': [900, 900, 900, 900, 900, 900]
+    ... })
+    >>> f_725(df, 'A', 'B', 'C')
+    False
+    """
+    # Filter rows based on column_b and column_c
+    filtered_df = df[(df[column_b] > 50) & (df[column_c] == 900)]
+
+    if filtered_df[column_a].nunique() <= 1:
+        return True
+
+    # An empty dataframe after filtering is also treated as stationary and returns True
+    # (already caught by the nunique check above, kept as an explicit guard)
+    if filtered_df.empty:
+        return True
+
+    # Perform Augmented Dickey-Fuller test
+    adf_result = adfuller(filtered_df[column_a])
+    p_value = adf_result[1]
+    return p_value <= 0.05
+
+
+import unittest
+import os
+import pandas as pd
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+class TestCases(unittest.TestCase):
+
+    def setUp(self):
+        # Create DataFrame in setUp for test isolation
+        self.data = pd.DataFrame({
+            'A': list(range(100)),
+            'B': [x * 2 for x in range(100)],
+            'C': [900 if x % 2 == 0 else 800 for x in range(100)]
+        })
+
+    def test_constant_value(self):
+        # All values in column A are constant after filtering
+        self.data['A'] = 5
+        result = f_725(self.data, 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True as data is constant.")
+
+    def test_empty_after_filter(self):
+        # After filtering, no rows remain
+        result = f_725(self.data[self.data['B'] > 1000], 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True as no data remains after filter.")
+
+    def test_non_stationary_data(self):
+        # Test a clearly non-stationary dataset
+        result = f_725(self.data, 'A', 'B', 'C')
+        self.assertFalse(result, "Should be False as data is non-stationary.")
+
+    def test_stationary_data(self):
+        # Test a stationary dataset
+        self.data['A'] = 5
+        result = f_725(self.data, 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True as data is stationary.")
+
+    def test_edge_case_small_dataset(self):
+        # Test a very small dataset
+        small_data = pd.DataFrame({
+            'A': [1, 1],
+            'B': [60, 70],
+            'C': [900, 900]
+        })
+        result = f_725(small_data, 'A', 'B', 'C')
+        self.assertTrue(result, "Should be True due to small dataset size or no variation.")
+
+
+if __name__ == '__main__':
+    run_tests()
diff --git a/data/raw/f_728_data_simon/data_large.csv b/data/raw/f_728_data_simon/data_large.csv
deleted file mode 100644
index 435d7a0c..00000000
--- a/data/raw/f_728_data_simon/data_large.csv
+++ /dev/null
@@ -1,201 +0,0 @@
-Name,Age,Score
-Ashley,29,76
-James,24,73
-John,29,94
-Jack,23,86
-Melissa,23,77
-Holly,23,70
-Heather,27,59
-Elizabeth,21,92
-Stacy,23,57
-Carol,23,87
-Frank,18,100
-Kelly,24,76
-John,20,69
-Barbara,23,52
-Deanna,19,63
-Lawrence,25,51
-Daniel,20,68
-Cindy,19,63
-Erik,25,90
-Courtney,21,50
-Patrick,23,95
-Hannah,24,90
-Alex,25,66
-Joseph,22,99
-Phillip,22,86
-Jason,28,94
-Alexander,21,79
-Christopher,21,91
-Emma,22,84
-Jon,28,79
-Stephanie,26,51
-Yolanda,22,76
-Tiffany,26,93
-Victoria,26,76
-Brenda,22,80
-Robert,26,50
-Ann,27,76
-Teresa,29,96
-Bonnie,25,61
-Kevin,28,83
-Robert,26,90
-Katie,25,83
-Ricky,30,52
-John,30,59
-Karen,27,100
-Kimberly,30,94
-Nicholas,19,57
-Jamie,18,94
-Michael,21,78
-William,26,100
-Scott,25,72
-Lisa,30,99
-Christopher,21,73
-Nicole,24,96
-Thomas,29,97
-Sarah,29,76
-Mason,21,53
-Rebecca,27,82
-Joshua,22,81
-Daniel,30,68
-Carol,23,89
-Michelle,24,53
-Joseph,19,80
-Brian,21,100
-Lawrence,28,69
-Brandon,25,89
-Kent,18,85
-Rebecca,28,100
-Joseph,26,61
-Molly,20,50
-William,23,67
-Jason,27,59
-Emily,21,54
-Robert,23,60
-Richard,22,69
-Patricia,25,55
-Theresa,23,100
-Sarah,23,95
-Brooke,24,91
-Mark,20,50
-Andrew,21,78 -Jasmine,24,87 -Brian,21,82 -Jessica,23,53 -Brian,21,55 -Edward,21,56 -Debra,27,63 -Amy,18,51 -Matthew,28,60 -Robert,18,74 -Susan,18,71 -Luis,21,76 -Shannon,19,82 -Richard,25,51 -Makayla,23,84 -Richard,30,76 -Derrick,22,67 -Bryan,26,56 -Darryl,25,81 -Donna,29,64 -Joshua,30,68 -Janice,24,71 -James,25,76 -Diane,19,58 -Christopher,21,86 -Denise,24,82 -Ryan,30,83 -Melissa,20,62 -Keith,20,100 -James,28,91 -Steven,27,54 -Leslie,18,85 -Dominic,30,55 -Madison,29,80 -Alec,30,64 -Sherry,25,96 -Matthew,29,84 -Joel,27,76 -Paula,30,85 -Benjamin,26,60 -David,30,57 -Michael,22,67 -Justin,30,66 -Bob,29,53 -Nicole,25,88 -Alexandria,24,83 -Timothy,24,90 -Dylan,20,96 -David,29,62 -Nicole,28,97 -Angel,21,81 -Pamela,26,94 -Lauren,28,96 -Jessica,26,55 -Julie,26,60 -William,26,75 -George,21,99 -William,30,57 -Anna,30,54 -Kristine,19,84 -Lauren,21,92 -Diana,30,79 -David,19,82 -Marcus,26,79 -Charles,27,84 -Devin,23,88 -Patrick,24,53 -Andrew,24,53 -Adam,27,61 -Michelle,25,73 -Michelle,25,59 -Richard,23,100 -Briana,30,78 -Katherine,20,62 -Jacqueline,28,93 -Jonathon,30,77 -Craig,30,93 -Marcus,18,75 -Mark,23,98 -Scott,22,54 -Virginia,20,72 -Catherine,20,79 -Jeremy,22,65 -Shelly,19,73 -Paul,19,75 -Amanda,28,78 -Virginia,19,65 -Brittany,20,93 -Carrie,19,85 -Jessica,26,75 -Adam,27,76 -Adrian,28,86 -Steven,25,82 -Amy,21,100 -Bridget,23,69 -Kimberly,24,62 -Scott,22,85 -Angela,20,88 -Lynn,30,81 -Christine,30,54 -Nathaniel,24,54 -James,26,92 -Megan,20,81 -Mark,29,85 -Linda,25,67 -Jessica,26,86 -Danielle,21,53 -Michael,29,98 -Kelly,27,73 -Jamie,22,72 -Michelle,19,74 -Caleb,18,56 -Cassidy,18,66 -Jessica,23,70 -Matthew,21,57 -Maria,18,96 -Jenny,22,87 -Kim,26,80 -Kathryn,28,85 -Vanessa,24,78 diff --git a/data/raw/f_728_data_simon/data_medium.csv b/data/raw/f_728_data_simon/data_medium.csv deleted file mode 100644 index 7df43077..00000000 --- a/data/raw/f_728_data_simon/data_medium.csv +++ /dev/null @@ -1,51 +0,0 @@ -Name,Age,Score -Thomas,26,76 -William,23,59 -Charles,26,65 -Catherine,23,96 -Robert,27,95 -Corey,23,76 -Andrew,29,94 -Christian,22,87 -Natalie,18,58 -Terri,18,100 -Randy,18,68 -Charles,21,50 -Andrew,18,86 -Teresa,19,62 -Cindy,23,77 -Danielle,19,64 -Heather,19,66 -Colton,22,100 -Jason,29,69 -Lisa,24,77 -Sonya,23,76 -William,18,56 -Monica,30,53 -Denise,23,81 -Christina,19,68 -Diana,30,70 -Melanie,29,76 -Nicole,23,96 -Charles,22,94 -Arthur,21,90 -Richard,24,73 -Patrick,20,82 -Matthew,21,80 -Jennifer,21,74 -Jennifer,27,80 -Eric,20,60 -Kevin,18,61 -Curtis,18,74 -Matthew,23,57 -Julie,19,90 -Stanley,18,78 -Savannah,29,66 -Stephen,24,73 -Adrian,18,58 -Ryan,25,74 -Ann,23,93 -Kimberly,26,61 -Nicholas,30,80 -Cassandra,28,61 -Patricia,23,89 diff --git a/data/raw/f_728_data_simon/data_small.csv b/data/raw/f_728_data_simon/data_small.csv deleted file mode 100644 index 25af00dc..00000000 --- a/data/raw/f_728_data_simon/data_small.csv +++ /dev/null @@ -1,11 +0,0 @@ -Name,Age,Score -Lisa,23,87 -Stephen,18,78 -Brent,23,66 -Sheri,22,75 -Karen,22,51 -Vincent,22,70 -Justin,21,80 -Russell,20,97 -Ryan,18,83 -Brianna,21,98 diff --git a/data/raw/f_728_data_simon/large_res.csv b/data/raw/f_728_data_simon/large_res.csv deleted file mode 100644 index 3ea344da..00000000 --- a/data/raw/f_728_data_simon/large_res.csv +++ /dev/null @@ -1,201 +0,0 @@ -,Name,Age,Score -148,Adam,27,61 -170,Adam,27,76 -171,Adrian,28,86 -114,Alec,30,64 -22,Alex,25,66 -26,Alexander,21,79 -125,Alexandria,24,83 -165,Amanda,28,78 -87,Amy,18,51 -173,Amy,21,100 -80,Andrew,21,78 -147,Andrew,24,53 -130,Angel,21,81 -177,Angela,20,88 -36,Ann,27,76 -138,Anna,30,54 
-0,Ashley,29,76 -13,Barbara,23,52 -119,Benjamin,26,60 -123,Bob,29,53 -38,Bonnie,25,61 -65,Brandon,25,89 -34,Brenda,22,80 -63,Brian,21,100 -82,Brian,21,82 -84,Brian,21,55 -152,Briana,30,78 -174,Bridget,23,69 -167,Brittany,20,93 -78,Brooke,24,91 -97,Bryan,26,56 -191,Caleb,18,56 -9,Carol,23,87 -60,Carol,23,89 -168,Carrie,19,85 -192,Cassidy,18,66 -161,Catherine,20,79 -144,Charles,27,84 -179,Christine,30,54 -27,Christopher,21,91 -52,Christopher,21,73 -104,Christopher,21,86 -17,Cindy,19,63 -19,Courtney,21,50 -156,Craig,30,93 -16,Daniel,20,68 -59,Daniel,30,68 -186,Danielle,21,53 -98,Darryl,25,81 -142,David,19,82 -128,David,29,62 -120,David,30,57 -14,Deanna,19,63 -86,Debra,27,63 -105,Denise,24,82 -96,Derrick,22,67 -145,Devin,23,88 -141,Diana,30,79 -103,Diane,19,58 -112,Dominic,30,55 -99,Donna,29,64 -127,Dylan,20,96 -85,Edward,21,56 -7,Elizabeth,21,92 -72,Emily,21,54 -28,Emma,22,84 -18,Erik,25,90 -10,Frank,18,100 -136,George,21,99 -21,Hannah,24,90 -6,Heather,27,59 -5,Holly,23,70 -3,Jack,23,86 -154,Jacqueline,28,93 -1,James,24,73 -102,James,25,76 -181,James,26,92 -109,James,28,91 -47,Jamie,18,94 -189,Jamie,22,72 -101,Janice,24,71 -81,Jasmine,24,87 -71,Jason,27,59 -25,Jason,28,94 -196,Jenny,22,87 -162,Jeremy,22,65 -83,Jessica,23,53 -193,Jessica,23,70 -133,Jessica,26,55 -169,Jessica,26,75 -185,Jessica,26,86 -117,Joel,27,76 -12,John,20,69 -2,John,29,94 -43,John,30,59 -29,Jon,28,79 -155,Jonathon,30,77 -62,Joseph,19,80 -23,Joseph,22,99 -68,Joseph,26,61 -58,Joshua,22,81 -100,Joshua,30,68 -134,Julie,26,60 -122,Justin,30,66 -44,Karen,27,100 -153,Katherine,20,62 -198,Kathryn,28,85 -41,Katie,25,83 -108,Keith,20,100 -11,Kelly,24,76 -188,Kelly,27,73 -66,Kent,18,85 -39,Kevin,28,83 -197,Kim,26,80 -175,Kimberly,24,62 -45,Kimberly,30,94 -139,Kristine,19,84 -140,Lauren,21,92 -132,Lauren,28,96 -15,Lawrence,25,51 -64,Lawrence,28,69 -111,Leslie,18,85 -184,Linda,25,67 -51,Lisa,30,99 -91,Luis,21,76 -178,Lynn,30,81 -113,Madison,29,80 -94,Makayla,23,84 -157,Marcus,18,75 -143,Marcus,26,79 -195,Maria,18,96 -79,Mark,20,50 -158,Mark,23,98 -183,Mark,29,85 -56,Mason,21,53 -194,Matthew,21,57 -88,Matthew,28,60 -116,Matthew,29,84 -182,Megan,20,81 -107,Melissa,20,62 -4,Melissa,23,77 -48,Michael,21,78 -121,Michael,22,67 -187,Michael,29,98 -190,Michelle,19,74 -61,Michelle,24,53 -149,Michelle,25,73 -150,Michelle,25,59 -69,Molly,20,50 -180,Nathaniel,24,54 -46,Nicholas,19,57 -53,Nicole,24,96 -124,Nicole,25,88 -129,Nicole,28,97 -131,Pamela,26,94 -75,Patricia,25,55 -20,Patrick,23,95 -146,Patrick,24,53 -164,Paul,19,75 -118,Paula,30,85 -24,Phillip,22,86 -57,Rebecca,27,82 -67,Rebecca,28,100 -74,Richard,22,69 -151,Richard,23,100 -93,Richard,25,51 -95,Richard,30,76 -42,Ricky,30,52 -89,Robert,18,74 -73,Robert,23,60 -35,Robert,26,50 -40,Robert,26,90 -106,Ryan,30,83 -77,Sarah,23,95 -55,Sarah,29,76 -159,Scott,22,54 -176,Scott,22,85 -50,Scott,25,72 -92,Shannon,19,82 -163,Shelly,19,73 -115,Sherry,25,96 -8,Stacy,23,57 -30,Stephanie,26,51 -172,Steven,25,82 -110,Steven,27,54 -90,Susan,18,71 -37,Teresa,29,96 -76,Theresa,23,100 -54,Thomas,29,97 -32,Tiffany,26,93 -126,Timothy,24,90 -199,Vanessa,24,78 -33,Victoria,26,76 -166,Virginia,19,65 -160,Virginia,20,72 -70,William,23,67 -49,William,26,100 -135,William,26,75 -137,William,30,57 -31,Yolanda,22,76 diff --git a/data/raw/f_728_data_simon/large_series.csv b/data/raw/f_728_data_simon/large_series.csv deleted file mode 100644 index 73efcf8f..00000000 --- a/data/raw/f_728_data_simon/large_series.csv +++ /dev/null @@ -1,143 +0,0 @@ -Name,Score -Adam,68.5 -Adrian,86.0 -Alec,64.0 -Alex,66.0 -Alexander,79.0 
-Alexandria,83.0 -Amanda,78.0 -Amy,75.5 -Andrew,65.5 -Angel,81.0 -Angela,88.0 -Ann,76.0 -Anna,54.0 -Ashley,76.0 -Barbara,52.0 -Benjamin,60.0 -Bob,53.0 -Bonnie,61.0 -Brandon,89.0 -Brenda,80.0 -Brian,79.0 -Briana,78.0 -Bridget,69.0 -Brittany,93.0 -Brooke,91.0 -Bryan,56.0 -Caleb,56.0 -Carol,88.0 -Carrie,85.0 -Cassidy,66.0 -Catherine,79.0 -Charles,84.0 -Christine,54.0 -Christopher,83.33333333333333 -Cindy,63.0 -Courtney,50.0 -Craig,93.0 -Daniel,68.0 -Danielle,53.0 -Darryl,81.0 -David,67.0 -Deanna,63.0 -Debra,63.0 -Denise,82.0 -Derrick,67.0 -Devin,88.0 -Diana,79.0 -Diane,58.0 -Dominic,55.0 -Donna,64.0 -Dylan,96.0 -Edward,56.0 -Elizabeth,92.0 -Emily,54.0 -Emma,84.0 -Erik,90.0 -Frank,100.0 -George,99.0 -Hannah,90.0 -Heather,59.0 -Holly,70.0 -Jack,86.0 -Jacqueline,93.0 -James,83.0 -Jamie,83.0 -Janice,71.0 -Jasmine,87.0 -Jason,76.5 -Jenny,87.0 -Jeremy,65.0 -Jessica,67.8 -Joel,76.0 -John,74.0 -Jon,79.0 -Jonathon,77.0 -Joseph,80.0 -Joshua,74.5 -Julie,60.0 -Justin,66.0 -Karen,100.0 -Katherine,62.0 -Kathryn,85.0 -Katie,83.0 -Keith,100.0 -Kelly,74.5 -Kent,85.0 -Kevin,83.0 -Kim,80.0 -Kimberly,78.0 -Kristine,84.0 -Lauren,94.0 -Lawrence,60.0 -Leslie,85.0 -Linda,67.0 -Lisa,99.0 -Luis,76.0 -Lynn,81.0 -Madison,80.0 -Makayla,84.0 -Marcus,77.0 -Maria,96.0 -Mark,77.66666666666667 -Mason,53.0 -Matthew,67.0 -Megan,81.0 -Melissa,69.5 -Michael,81.0 -Michelle,64.75 -Molly,50.0 -Nathaniel,54.0 -Nicholas,57.0 -Nicole,93.66666666666667 -Pamela,94.0 -Patricia,55.0 -Patrick,74.0 -Paul,75.0 -Paula,85.0 -Phillip,86.0 -Rebecca,91.0 -Richard,74.0 -Ricky,52.0 -Robert,68.5 -Ryan,83.0 -Sarah,85.5 -Scott,70.33333333333333 -Shannon,82.0 -Shelly,73.0 -Sherry,96.0 -Stacy,57.0 -Stephanie,51.0 -Steven,68.0 -Susan,71.0 -Teresa,96.0 -Theresa,100.0 -Thomas,97.0 -Tiffany,93.0 -Timothy,90.0 -Vanessa,78.0 -Victoria,76.0 -Virginia,68.5 -William,74.75 -Yolanda,76.0 diff --git a/data/raw/f_728_data_simon/medium_res.csv b/data/raw/f_728_data_simon/medium_res.csv deleted file mode 100644 index 874b40b5..00000000 --- a/data/raw/f_728_data_simon/medium_res.csv +++ /dev/null @@ -1,51 +0,0 @@ -,Name,Age,Score -43,Adrian,18,58 -12,Andrew,18,86 -6,Andrew,29,94 -45,Ann,23,93 -29,Arthur,21,90 -48,Cassandra,28,61 -3,Catherine,23,96 -11,Charles,21,50 -28,Charles,22,94 -2,Charles,26,65 -7,Christian,22,87 -24,Christina,19,68 -14,Cindy,23,77 -17,Colton,22,100 -5,Corey,23,76 -37,Curtis,18,74 -15,Danielle,19,64 -23,Denise,23,81 -25,Diana,30,70 -35,Eric,20,60 -16,Heather,19,66 -18,Jason,29,69 -33,Jennifer,21,74 -34,Jennifer,27,80 -39,Julie,19,90 -36,Kevin,18,61 -46,Kimberly,26,61 -19,Lisa,24,77 -32,Matthew,21,80 -38,Matthew,23,57 -26,Melanie,29,76 -22,Monica,30,53 -8,Natalie,18,58 -47,Nicholas,30,80 -27,Nicole,23,96 -49,Patricia,23,89 -31,Patrick,20,82 -10,Randy,18,68 -30,Richard,24,73 -4,Robert,27,95 -44,Ryan,25,74 -41,Savannah,29,66 -20,Sonya,23,76 -40,Stanley,18,78 -42,Stephen,24,73 -13,Teresa,19,62 -9,Terri,18,100 -0,Thomas,26,76 -21,William,18,56 -1,William,23,59 diff --git a/data/raw/f_728_data_simon/medium_series.csv b/data/raw/f_728_data_simon/medium_series.csv deleted file mode 100644 index 808a42a2..00000000 --- a/data/raw/f_728_data_simon/medium_series.csv +++ /dev/null @@ -1,45 +0,0 @@ -Name,Score -Adrian,58.0 -Andrew,90.0 -Ann,93.0 -Arthur,90.0 -Cassandra,61.0 -Catherine,96.0 -Charles,69.66666666666667 -Christian,87.0 -Christina,68.0 -Cindy,77.0 -Colton,100.0 -Corey,76.0 -Curtis,74.0 -Danielle,64.0 -Denise,81.0 -Diana,70.0 -Eric,60.0 -Heather,66.0 -Jason,69.0 -Jennifer,77.0 -Julie,90.0 -Kevin,61.0 -Kimberly,61.0 -Lisa,77.0 -Matthew,68.5 
-Melanie,76.0 -Monica,53.0 -Natalie,58.0 -Nicholas,80.0 -Nicole,96.0 -Patricia,89.0 -Patrick,82.0 -Randy,68.0 -Richard,73.0 -Robert,95.0 -Ryan,74.0 -Savannah,66.0 -Sonya,76.0 -Stanley,78.0 -Stephen,73.0 -Teresa,62.0 -Terri,100.0 -Thomas,76.0 -William,57.5 diff --git a/data/raw/f_728_simon.py b/data/raw/f_728_simon.py deleted file mode 100644 index 69e873cf..00000000 --- a/data/raw/f_728_simon.py +++ /dev/null @@ -1,208 +0,0 @@ -import pandas as pd -from collections import Counter - - -def f_728(data): - ''' - Analyze a dictionary of student data to return a dataframe sorted by name and age in ascending order, - the average score per student as a pandas Series, and the most common age as an integer. - - Parameters: - data (dict): A dictionary containing student data with three keys: - - 'Name': List of student names. - - 'Age': List of student ages. - - 'Score': List of student scores. - - Returns: - pd.DataFrame, pd.Series, int or None: - - A dataframe sorted by 'Name' and 'Age' in ascending order. - - A series representing average scores indexed by student names. - - An integer representing the most common age or None if no data is available. - - Raises: - ValueError: If the dictionary does not have the required keys. - - Requirements: - - pandas - - collections - - Example: - >>> data = { - ... 'Name': ['Tom', 'Nick', 'John', 'Tom', 'John', 'John', 'Nick', 'Tom', 'John', 'Tom'], - ... 'Age': [20, 21, 19, 20, 19, 19, 21, 20, 19, 20], - ... 'Score': [85, 79, 92, 88, 90, 92, 81, 86, 90, 85] - ... } - >>> df, avg_scores, common_age = f_728(data) - >>> print(df) - Name Age Score - 2 John 19 92 - 4 John 19 90 - 5 John 19 92 - 8 John 19 90 - 1 Nick 21 79 - 6 Nick 21 81 - 0 Tom 20 85 - 3 Tom 20 88 - 7 Tom 20 86 - 9 Tom 20 85 - >>> print(avg_scores) - Name - John 91.00 - Nick 80.00 - Tom 86.25 - Name: Score, dtype: float64 - >>> print(common_age) - 19 - - >>> data = { - ... 'Name': ['Simon', 'Alex', 'Tanja', 'Amanda', 'Tanja'], - ... 'Age': [21, 42, 54, 20, 54], - ... 'Score': [1, 1, 2, 3, 5] - ... 
} - >>> df, avg_scores, common_age = f_728(data) - >>> rint(df) - Name Age Score - 1 Alex 42 1 - 3 Amanda 20 3 - 0 Simon 21 1 - 2 Tanja 54 2 - 4 Tanja 54 5 - >>> print(avg_scores) - Name - Alex 1.0 - Amanda 3.0 - Simon 1.0 - Tanja 3.5 - Name: Score, dtype: float64 - >>> print(common_age) - 54 - ''' - - if not all(key in data for key in ['Name', 'Age', 'Score']): - raise ValueError("The dictionary must have the keys 'Name', 'Age', 'Score'") - - - # Creating a dataframe and sorting it - df = pd.DataFrame(data).sort_values(['Name', 'Age']) - - # Calculating average scores - avg_scores = df.groupby('Name')['Score'].mean() - - # Getting the most common age - age_counts = Counter(df['Age']) - most_common_age = age_counts.most_common(1)[0][0] if age_counts else None - - return df, avg_scores, most_common_age - - -import unittest -import pandas as pd -import os - -class TestCases(unittest.TestCase): - - def setUp(self): - self.data_small_path = os.path.join('f_728_data_simon', 'data_small.csv') - self.data_medium_path = os.path.join('f_728_data_simon', 'data_medium.csv') - self.data_large_path = os.path.join('f_728_data_simon', 'data_large.csv') - - - def test_wrong_keys(self): - data = { - 'a': ['Tom', 'Nick', 'John', 'Tom', 'John', 'John', 'Nick', 'Tom', 'John', 'Tom'], - 'Age': [20, 21, 19, 20, 19, 19, 21, 20, 19, 20], - 'Score': [85, 79, 92, 88, 90, 92, 81, 86, 90, 85] - } - self.assertRaises(Exception, f_728, data) - - - def test_case_1(self): - data_small = pd.read_csv(self.data_small_path).to_dict(orient='list') - df, avg_scores, common_age = f_728(data_small) - df_exp = pd.DataFrame( - {'Name': {2: 'Brent', - 9: 'Brianna', - 6: 'Justin', - 4: 'Karen', - 0: 'Lisa', - 7: 'Russell', - 8: 'Ryan', - 3: 'Sheri', - 1: 'Stephen', - 5: 'Vincent'}, - 'Age': {2: 23, 9: 21, 6: 21, 4: 22, 0: 23, 7: 20, 8: 18, 3: 22, 1: 18, 5: 22}, - 'Score': {2: 66, - 9: 98, - 6: 80, - 4: 51, - 0: 87, - 7: 97, - 8: 83, - 3: 75, - 1: 78, - 5: 70}} - ) - avg_exp = pd.Series({'Brent': 66.0, - 'Brianna': 98.0, - 'Justin': 80.0, - 'Karen': 51.0, - 'Lisa': 87.0, - 'Russell': 97.0, - 'Ryan': 83.0, - 'Sheri': 75.0, - 'Stephen': 78.0, - 'Vincent': 70.0}) - pd.testing.assert_frame_equal(df, df_exp) - pd.testing.assert_series_equal(avg_scores, avg_exp, check_index=False, check_names=False) - self.assertEqual(common_age, 22) - - def test_case_2(self): - data_medium = pd.read_csv(self.data_medium_path).to_dict(orient='list') - df, avg_scores, common_age = f_728(data_medium) - df_exp = pd.read_csv(os.path.join('f_728_data_simon', 'medium_res.csv'), index_col=0) - avg_exp = pd.read_csv(os.path.join('f_728_data_simon', 'medium_series.csv'), index_col=0, header=0).squeeze("columns") - - pd.testing.assert_frame_equal(df, df_exp) - pd.testing.assert_series_equal(avg_scores, avg_exp) - self.assertEqual(common_age, 23) - - def test_case_3(self): - data_large = pd.read_csv(self.data_large_path).to_dict(orient='list') - df, avg_scores, common_age = f_728(data_large) - df_exp = pd.read_csv(os.path.join('f_728_data_simon', 'large_res.csv'), index_col=0) - avg_exp = pd.read_csv(os.path.join('f_728_data_simon', 'large_series.csv'), index_col=0, header=0).squeeze("columns") - pd.testing.assert_frame_equal(df, df_exp) - pd.testing.assert_series_equal(avg_scores, avg_exp) - - self.assertEqual(common_age, 30) - - def test_case_4(self): - data = { - 'Name': ['M', 'M', 'M'], - 'Age': [40, 40, 40], - 'Score': [70, 70, 70] - } - df, avg_scores, common_age = f_728(data) - self.assertEqual(df.shape, (3, 3)) - self.assertAlmostEqual(avg_scores['M'], 70.0) 
- self.assertEqual(common_age, 40) - - def test_case_5(self): - data = { - 'Name': [], - 'Age': [], - 'Score': [] - } - df, avg_scores, common_age = f_728(data) - self.assertEqual(df.shape, (0, 3)) - self.assertTrue(avg_scores.empty) - self.assertIsNone(common_age) - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - -if __name__ == "__main__": - run_tests() \ No newline at end of file diff --git a/data/raw/f_728_simon_chien_edit.py b/data/raw/f_728_simon_chien_edit.py new file mode 100644 index 00000000..cf744212 --- /dev/null +++ b/data/raw/f_728_simon_chien_edit.py @@ -0,0 +1,144 @@ +import pandas as pd +from collections import Counter + + +def f_728(data): + """ + Analyze a dictionary of student data to return a dataframe sorted by name and age in ascending order, + the average score per student as a pandas Series, and the most common age as an integer. + + Parameters: + data (dict): A dictionary containing student data with three keys: + - 'Name': List of student names. + - 'Age': List of student ages. + - 'Score': List of student scores. + + Returns: + pd.DataFrame, pd.Series, int or None: + - A dataframe sorted by 'Name' and 'Age' in ascending order. + - A series representing average scores indexed by student names. + - An integer representing the most common age or None if no data is available. + + Raises: + ValueError: If the dictionary does not have the required keys. + + Requirements: + - pandas + - collections + + Example: + >>> data = { + ... 'Name': ['Tom', 'Nick', 'John', 'Tom', 'John', 'John', 'Nick', 'Tom', 'John', 'Tom'], + ... 'Age': [20, 21, 19, 20, 19, 19, 21, 20, 19, 20], + ... 'Score': [85, 79, 92, 88, 90, 92, 81, 86, 90, 85] + ... 
} + >>> df, avg_scores, common_age = f_728(data) + >>> print(df) + Name Age Score + 2 John 19 92 + 4 John 19 90 + 5 John 19 92 + 8 John 19 90 + 1 Nick 21 79 + 6 Nick 21 81 + 0 Tom 20 85 + 3 Tom 20 88 + 7 Tom 20 86 + 9 Tom 20 85 + """ + + if not all(key in data for key in ['Name', 'Age', 'Score']): + raise ValueError("The dictionary must have the keys 'Name', 'Age', 'Score'") + + # Creating a dataframe and sorting it + df = pd.DataFrame(data).sort_values(['Name', 'Age']) + + # Calculating average scores + avg_scores = df.groupby('Name')['Score'].mean() + + # Getting the most common age + age_counts = Counter(df['Age']) + most_common_age = age_counts.most_common(1)[0][0] if age_counts else None + + return df, avg_scores, most_common_age + + +import unittest +import pandas as pd +import os + + +class TestCases(unittest.TestCase): + + def test_wrong_keys(self): + # Testing with incorrect dictionary keys + data = { + 'Names': ['Tom', 'Nick'], + 'Ages': [20, 21], + 'Scores': [85, 79] + } + with self.assertRaises(ValueError): + f_728(data) + + def test_correct_processing(self): + # Testing with correctly formatted data + data = { + 'Name': ['Tom', 'Nick', 'Tom', 'John'], + 'Age': [20, 21, 20, 19], + 'Score': [85, 79, 88, 92] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(df.iloc[0]['Name'], 'John') + self.assertAlmostEqual(avg_scores['Tom'], 86.5) + self.assertEqual(common_age, 20) + + def test_empty_data(self): + # Testing with empty lists + data = {'Name': [], 'Age': [], 'Score': []} + df, avg_scores, common_age = f_728(data) + self.assertTrue(df.empty) + self.assertTrue(avg_scores.empty) + self.assertIsNone(common_age) + + def test_all_same_age(self): + # Testing with all students having the same age + data = { + 'Name': ['Alice', 'Bob', 'Cindy'], + 'Age': [25, 25, 25], + 'Score': [88, 92, 85] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(common_age, 25) + + def test_no_common_age(self): + # Testing with no common age, each student has a unique age + data = { + 'Name': ['Alice', 'Bob', 'Cindy'], + 'Age': [24, 25, 26], + 'Score': [88, 92, 85] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(common_age, 24) # Assuming the first element is taken if all are equally common + + def test_duplicate_names_different_ages(self): + # Testing with duplicate names but different ages + data = { + 'Name': ['Tom', 'Tom', 'Nick'], + 'Age': [20, 21, 21], + 'Score': [85, 88, 79] + } + df, avg_scores, common_age = f_728(data) + self.assertEqual(len(df[df['Name'] == 'Tom']), 2) + self.assertNotEqual(df.iloc[0]['Age'], df.iloc[1]['Age']) + self.assertTrue(df[df['Name'] == 'Tom'].Age.isin([20, 21]).all()) + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +if __name__ == "__main__": + run_tests() diff --git a/data/raw/f_730_data_simon/test_file1.csv b/data/raw/f_730_data_simon/test_file1.csv deleted file mode 100644 index 70ae39f9..00000000 --- a/data/raw/f_730_data_simon/test_file1.csv +++ /dev/null @@ -1,3 +0,0 @@ -Name,Age,Gender -Alice,25,Female -Bob,30,Male diff --git a/data/raw/f_730_data_simon/test_file2.csv b/data/raw/f_730_data_simon/test_file2.csv deleted file mode 100644 index 9c68883e..00000000 --- a/data/raw/f_730_data_simon/test_file2.csv +++ /dev/null @@ -1,3 +0,0 @@ -Name,Age,Gender -Eve,22,Female -Charlie,28,Male diff --git a/data/raw/f_730_data_simon/test_file3.csv b/data/raw/f_730_data_simon/test_file3.csv deleted file mode 100644 
index 88e5e729..00000000 --- a/data/raw/f_730_data_simon/test_file3.csv +++ /dev/null @@ -1,3 +0,0 @@ -Name,Age,Gender -Frank,35,Male -Grace,29,Female diff --git a/data/raw/f_730_data_simon/test_file4.csv b/data/raw/f_730_data_simon/test_file4.csv deleted file mode 100644 index 5ec7fc1c..00000000 --- a/data/raw/f_730_data_simon/test_file4.csv +++ /dev/null @@ -1,3 +0,0 @@ -Animal,Size,Weight -Cat, 4, 23 -Dog, 12, 100 diff --git a/data/raw/f_730_simon.py b/data/raw/f_730_simon.py deleted file mode 100644 index 76ba52e4..00000000 --- a/data/raw/f_730_simon.py +++ /dev/null @@ -1,126 +0,0 @@ -import pandas as pd -import os - -def f_730(data_dir: str, csv_files: list) -> pd.DataFrame: - """ - Merge / Concatenate multiple CSV files from a specified directory into a single Pandas DataFrame. - - If an empty list of files is passed, an empty DataFrame is returned. - - Parameters: - data_dir (str): The directory path where the CSV files are located. - csv_files (list): A list of CSV file names to be merged. - - Returns: - pd.DataFrame: A pandas DataFrame with the merged data. - - Requirements: - - pandas - - os - - Example: - >>> df = f_730('/path/to/data/directory', ['file1.csv', 'file2.csv', 'file3.csv']) - >>> print(df.head()) - Name Age Gender - 0 Simon 5 Male - 1 Bobby 32 Male - 0 Elena 13 Female - 1 Tom 23 Male - 0 Franko 12 Male - - >>> df = f_730('/path/to/data/directory', ['file1.csv', 'other_file.csv]) - >>> print(df.head()) - Name Age Gender Animal Size - 0 Simon 5 Male None None - 1 Bobby 32 Male None None - 0 Elena 13 Female None None - 2 None None None Tiger 12 - """ - merged_df = pd.DataFrame() - - for file in csv_files: - file_path = os.path.join(data_dir, file) - df = pd.read_csv(file_path) - merged_df = pd.concat([merged_df, df]) - - return merged_df - -import unittest - - -import unittest - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - -class TestCases(unittest.TestCase): - - def setUp(self): - self.test_data_dir = 'f_730_data_simon' - self.csv_files = ['test_file1.csv', 'test_file2.csv', 'test_file3.csv'] - - def test_case_1(self): - # Test with all three CSV files - df = f_730(self.test_data_dir, self.csv_files) - - merged_df = pd.DataFrame() - - for file in self.csv_files: - file_path = os.path.join(self.test_data_dir, file) - frame = pd.read_csv(file_path) - merged_df = pd.concat([merged_df, frame]) - pd.testing.assert_frame_equal(df, merged_df) - - def test_case_2(self): - # Test with only the first CSV file - df = f_730(self.test_data_dir, [self.csv_files[0]]) - merged_df = pd.DataFrame() - - for file in [self.csv_files[0]]: - file_path = os.path.join(self.test_data_dir, file) - frame = pd.read_csv(file_path) - merged_df = pd.concat([merged_df, frame]) - - pd.testing.assert_frame_equal(df, merged_df) - - - def test_case_3(self): - # Test with the first and third CSV files - df = f_730(self.test_data_dir, [self.csv_files[0], self.csv_files[2]]) - - merged_df = pd.DataFrame() - for file in [self.csv_files[0], self.csv_files[2]]: - file_path = os.path.join(self.test_data_dir, file) - frame = pd.read_csv(file_path) - merged_df = pd.concat([merged_df, frame]) - - pd.testing.assert_frame_equal(df, merged_df) - - def test_case_different_columns(self): - diff = 'test_file4.csv' - df = f_730(self.test_data_dir, [self.csv_files[0], diff]) - - merged_df = pd.DataFrame() - for file in [self.csv_files[0], diff]: - file_path = os.path.join(self.test_data_dir, file) - frame = 
pd.read_csv(file_path) - merged_df = pd.concat([merged_df, frame]) - - pd.testing.assert_frame_equal(df, merged_df) - - def test_case_4(self): - # Test with an empty list of CSV files - df = f_730(self.test_data_dir, []) - self.assertEqual(len(df), 0) # No rows should be present - - def test_case_5(self): - # Test with non-existing CSV file - with self.assertRaises(FileNotFoundError): - f_730(self.test_data_dir, ["non_existing.csv"]) - - -if __name__ == "__main__": - run_tests() \ No newline at end of file diff --git a/data/raw/f_730_simon_chien_edit.py b/data/raw/f_730_simon_chien_edit.py new file mode 100644 index 00000000..d5c68111 --- /dev/null +++ b/data/raw/f_730_simon_chien_edit.py @@ -0,0 +1,120 @@ +import pandas as pd +import os + + +def f_730(data_dir: str, csv_files: list) -> pd.DataFrame: + """ + Merge / Concatenate multiple CSV files from a specified directory into a single Pandas DataFrame. + + If an empty list of files is passed, an empty DataFrame is returned. + + Parameters: + data_dir (str): The directory path where the CSV files are located. + csv_files (list): A list of CSV file names to be merged. + + Returns: + pd.DataFrame: A pandas DataFrame with the merged data. + + Requirements: + - pandas + - os + + Example: + >>> df = f_730('/path/to/data/directory', ['file1.csv', 'file2.csv', 'file3.csv']) + >>> print(df.head()) + Name Age Gender + 0 Simon 5 Male + 1 Bobby 32 Male + 0 Elena 13 Female + 1 Tom 23 Male + 0 Franko 12 Male + """ + merged_df = pd.DataFrame() + + for file in csv_files: + file_path = os.path.join(data_dir, file) + df = pd.read_csv(file_path) + merged_df = pd.concat([merged_df, df], ignore_index=True) + + return merged_df + + +import unittest +import pandas as pd +import os +import shutil +import tempfile + + +def run_tests(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +class TestCases(unittest.TestCase): + + def setUp(self): + # Create a temporary directory to hold CSV files + self.test_dir = tempfile.mkdtemp() + self.files = { + 'file1.csv': pd.DataFrame({ + 'Name': ['Alice', 'Bob'], + 'Age': [25, 30] + }), + 'file2.csv': pd.DataFrame({ + 'Name': ['Charlie'], + 'Age': [35] + }), + 'file3.csv': pd.DataFrame({ + 'Name': ['David', 'Eve'], + 'Age': [45, 55], + 'Gender': ['Male', 'Female'] + }), + 'file4.csv': pd.DataFrame({ + 'Name': ['Faythe'], + 'Animal': ['Cat'] + }) + } + # Write files to disk + for filename, df in self.files.items(): + df.to_csv(os.path.join(self.test_dir, filename), index=False) + + def tearDown(self): + # Clean up the temporary directory + shutil.rmtree(self.test_dir) + + def test_with_multiple_files(self): + # Test merging multiple files + result = f_730(self.test_dir, ['file1.csv', 'file2.csv']) + expected_df = pd.concat([self.files['file1.csv'], self.files['file2.csv']], + ignore_index=True) + pd.testing.assert_frame_equal(result, expected_df) + + def test_with_different_columns(self): + # Test files with different columns + result = f_730(self.test_dir, ['file1.csv', 'file3.csv', 'file4.csv']) + expected_df = pd.concat([self.files['file1.csv'], self.files['file3.csv'], self.files['file4.csv']], + ignore_index=True) + pd.testing.assert_frame_equal(result, expected_df) + + def test_with_empty_list(self): + # Test with an empty list of files + result = f_730(self.test_dir, []) + self.assertTrue(result.empty) + + def test_with_nonexistent_file(self): + # Test referencing a non-existent file + with self.assertRaises(FileNotFoundError): + 
f_730(self.test_dir, ['nonexistent.csv']) + + def test_single_file(self): + # Test with a single file + result = f_730(self.test_dir, ['file2.csv']) + expected_df = self.files['file2.csv'] + pd.testing.assert_frame_equal(result, expected_df) + + +if __name__ == "__main__": + run_tests() diff --git a/data/raw/f_731_data_simon/empty.csv b/data/raw/f_731_data_simon/empty.csv deleted file mode 100644 index e69de29b..00000000 diff --git a/data/raw/f_731_data_simon/mock_data1.csv b/data/raw/f_731_data_simon/mock_data1.csv deleted file mode 100644 index 40ceddc0..00000000 --- a/data/raw/f_731_data_simon/mock_data1.csv +++ /dev/null @@ -1,6 +0,0 @@ -Name,Age,Salary -Alice,25.0,50000.0 -Bob,,55000.0 -Charlie,30.0, -David,,60000.0 -Eve,27.0, diff --git a/data/raw/f_731_data_simon/mock_data2.csv b/data/raw/f_731_data_simon/mock_data2.csv deleted file mode 100644 index 8dfb19ff..00000000 --- a/data/raw/f_731_data_simon/mock_data2.csv +++ /dev/null @@ -1,5 +0,0 @@ -Animal, Size, Weight -Cat, 1, 10 -Dog, 4, 40 -, 12, 21 -Fly, 0.1, \ No newline at end of file diff --git a/data/raw/f_731_data_simon/strings.csv b/data/raw/f_731_data_simon/strings.csv deleted file mode 100644 index d7185223..00000000 --- a/data/raw/f_731_data_simon/strings.csv +++ /dev/null @@ -1,5 +0,0 @@ -A, B -Test, Hi -Test2, Hello -, Hola -Test3, \ No newline at end of file diff --git a/data/raw/f_731_simon.py b/data/raw/f_731_simon.py deleted file mode 100644 index 8b5d72b1..00000000 --- a/data/raw/f_731_simon.py +++ /dev/null @@ -1,113 +0,0 @@ -import os -import pandas as pd -import numpy as np - -def f_731(data_dir: str, csv_file: str) -> pd.DataFrame: - """ - Load a CSV file into a pandas DataFrame and replace the NaN values in - numeric columns with the mean of the corresponding column. - The resulting DataFrame is returned. - - If an empty csv is passed, an empty DataFrame is returned. - - Parameters: - - data_dir (str): The path to the directory containing the CSV file. - - csv_file (str): The name of the CSV file to be processed. - - Returns: - pd.DataFrame: A pandas DataFrame with the processed data. - - Raises: - FileNotFoundError: If csv_file does not exist. 
- - Requirements: - - os - - pandas - - numpy - - Example: - >>> df = f_731("/path/to/data/directory", "file.csv") - >>> print(df) - Fruit Taste Cost - 0 Apple Good 1 - 1 Orange NaN 2 - 2 Avocado Bad 1.667 - 3 Coconut Tasty 2 - - >>> df = f_731("/path/to/data/directory", "test.csv") - >>> print(df) - Name Score - 0 Alex 25.2 - 1 Tanja 31.5 - 2 Maine 99 - 3 Lisa 100 - 4 Simone 63.925 - """ - file_path = os.path.join(data_dir, csv_file) - try: - df = pd.read_csv(file_path) - except pd.errors.EmptyDataError: - return pd.DataFrame() - - - - for column in df.columns: - if np.issubdtype(df[column].dtype, np.number): # checking for numeric columns - df[column].fillna(df[column].mean(), inplace=True) - - return df - -import os -import pandas as pd -import numpy as np -import unittest - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - -class TestCases(unittest.TestCase): - - def setUp(self): - self.folder_path = 'f_731_data_simon' - - def test_case_1(self): - df_result = f_731(self.folder_path, "mock_data1.csv") - self.assertEqual(df_result["Age"].isnull().sum(), 0) - self.assertEqual(df_result["Salary"].isnull().sum(), 0) - self.assertEqual(df_result["Age"][1], 27.333333333333332) - self.assertEqual(df_result["Salary"][2], 55000) - - def test_case_2(self): - df_result = f_731(self.folder_path, "mock_data2.csv") - - exp = pd.DataFrame( - {'Animal': {0: 'Cat', 1: 'Dog', 2: np.nan, 3: 'Fly'}, - ' Size': {0: 1.0, 1: 4.0, 2: 12.0, 3: 0.1}, - ' Weight': {0: 10.0, 1: 40.0, 2: 21.0, 3: 23.666666666666668}} - ) - - pd.testing.assert_frame_equal(df_result, exp) - - def test_case_3(self): - # empty csv - res = f_731(self.folder_path, "empty.csv") - self.assertTrue(res.empty) - - def test_case_4(self): - # non existing csv - self.assertRaises(Exception, f_731, self.folder_path, "non_existing.csv") - - def test_case_5(self): - # only strings - df_result = f_731(self.folder_path, "strings.csv") - exp = pd.DataFrame( - {'A': {0: 'Test', 1: 'Test2', 2: np.nan, 3: 'Test3'}, - ' B ': {0: ' Hi', 1: ' Hello', 2: ' Hola', 3: np.nan}} - ) - pd.testing.assert_frame_equal(df_result, exp) - -if __name__ == "__main__": - run_tests() \ No newline at end of file diff --git a/data/raw/f_731_simon_chien_edit.py b/data/raw/f_731_simon_chien_edit.py new file mode 100644 index 00000000..b866cf25 --- /dev/null +++ b/data/raw/f_731_simon_chien_edit.py @@ -0,0 +1,142 @@ +import os +import pandas as pd +import numpy as np + + +def f_731(data_dir: str, csv_file: str) -> pd.DataFrame: + """ + Load a CSV file into a pandas DataFrame and replace the NaN values in + numeric columns with the mean of the corresponding column. + The resulting DataFrame is returned. + + If an empty csv is passed, an empty DataFrame is returned. + + Parameters: + - data_dir (str): The path to the directory containing the CSV file. + - csv_file (str): The name of the CSV file to be processed. + + Returns: + pd.DataFrame: A pandas DataFrame with the processed data. + + Raises: + FileNotFoundError: If csv_file does not exist. 
+
+    Requirements:
+    - os
+    - pandas
+    - numpy
+
+    Example:
+    >>> df = f_731("/path/to/data/directory", "file.csv")
+    >>> print(df)
+          Fruit  Taste   Cost
+    0     Apple   Good      1
+    1    Orange    NaN      2
+    2   Avocado    Bad  1.667
+    3   Coconut  Tasty      2
+    """
+    file_path = os.path.join(data_dir, csv_file)
+    try:
+        df = pd.read_csv(file_path)
+    except pd.errors.EmptyDataError:
+        return pd.DataFrame()
+
+    for column in df.columns:
+        if np.issubdtype(df[column].dtype, np.number):  # checking for numeric columns
+            df[column].fillna(df[column].mean(), inplace=True)
+
+    return df
+
+
+import unittest
+import pandas as pd
+import numpy as np
+import os
+import tempfile
+import shutil
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+class TestCases(unittest.TestCase):
+
+    def setUp(self):
+        # Create a temporary directory for test data
+        self.test_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        # Remove the temporary directory after the test
+        shutil.rmtree(self.test_dir)
+
+    def create_csv(self, filename, data):
+        # Helper method to create a CSV file
+        filepath = os.path.join(self.test_dir, filename)
+        data.to_csv(filepath, index=False)
+        return filename
+
+    def test_empty_csv(self):
+        # Test with an empty CSV file
+        filename = self.create_csv('empty.csv', pd.DataFrame())
+        result = f_731(self.test_dir, filename)
+        self.assertTrue(result.empty)
+
+    def test_numeric_columns_nan_replacement(self):
+        data = pd.DataFrame({
+            'Age': [25, np.nan, 30],
+            'Salary': [50000, 60000, np.nan]
+        })
+        filename = self.create_csv('data.csv', data)
+        expected = pd.DataFrame({
+            'Age': [25.0, 27.5, 30.0],  # Ensure all ages are floats
+            'Salary': [50000.0, 60000.0, 55000.0]  # Ensure all salaries are floats
+        })
+        result = f_731(self.test_dir, filename)
+        pd.testing.assert_frame_equal(result, expected)
+
+    def test_mixed_columns(self):
+        data = pd.DataFrame({
+            'Name': ['Alice', 'Bob', 'Charlie'],
+            'Score': [np.nan, 88, 92]
+        })
+        filename = self.create_csv('mixed.csv', data)
+        expected = pd.DataFrame({
+            'Name': ['Alice', 'Bob', 'Charlie'],
+            'Score': [90.0, 88.0, 92.0]  # Ensure all scores are floats
+        })
+        result = f_731(self.test_dir, filename)
+        pd.testing.assert_frame_equal(result, expected)
+
+    def test_all_nan_column(self):
+        # Test with a column that is entirely NaN
+        data = pd.DataFrame({
+            'Empty': [np.nan, np.nan, np.nan]
+        })
+        filename = self.create_csv('all_nan.csv', data)
+        result = f_731(self.test_dir, filename)
+        self.assertTrue(result['Empty'].isnull().all())
+
+    def test_no_numeric_data(self):
+        # Test a CSV file with no numeric data
+        data = pd.DataFrame({
+            'City': ['New York', 'Los Angeles', 'Chicago']
+        })
+        filename = self.create_csv('cities.csv', data)
+        result = f_731(self.test_dir, filename)
+        pd.testing.assert_frame_equal(result, data)
+
+    def test_file_not_found(self):
+        # Test the FileNotFoundError
+        with self.assertRaises(FileNotFoundError):
+            f_731(self.test_dir, "non_existent.csv")
+
+
+if __name__ == "__main__":
+    run_tests()
diff --git a/data/raw/f_732_data_simon/test_file1.csv b/data/raw/f_732_data_simon/test_file1.csv
deleted file mode 100644
index 583ccf3b..00000000
--- a/data/raw/f_732_data_simon/test_file1.csv
+++ /dev/null
@@ -1,15 +0,0 @@
-Name,Email,Phone,Address
-Steven Wright Jr.,cindy60@ramirez.info,(360)444-9308,"725 Jermaine Terrace Apt.
621, West Traceybury, UT 98155" -Kelsey Massey,cdavis@freeman.info,(783)780-9456,"372 Mark Mountains, West Michele, AR 89852" -Cindy Reed,kennethhall@wilson-thomas.net,001-403-811-2708,"7697 Jones Curve, Nicholsonland, KS 62232" -Monica Collins,maryhayes@gmail.com,(203)746-7408,"6609 Jessica Gardens Apt. 069, Littleside, VT 33168" -Daniel Cunningham,englishmegan@garrett.com,979-524-6450,"0512 Robertson Route Suite 331, Jamesville, AR 46574" -Kathleen Mayer,snydertravis@edwards.com,7155657740,"46777 Miller Summit, Port Stephanie, PA 07798" -James Davis,travisbrooks@lynn-melendez.org,977-282-7560x78550,"331 Patricia Drives, Lake Christopher, NY 46752" -Willie Graham,fwashington@gmail.com,532.504.4012x9637,"76462 Kayla Plains Apt. 568, Melissaport, VT 78349" -Shawn Gaines,hwise@yahoo.com,+1-864-357-3082,"121 Perry Stream, Gambleborough, NE 64939" -Heather Fisher,carsonalison@hotmail.com,+1-511-999-0316x590,"6275 Moore Dam, Brownshire, NJ 96750" -Peter Peters,jon71@roberson-stevens.info,(478)974-2465x1030,"5403 Kyle Wells, Greenfort, SC 57413" -Joseph Long,jenny55@young.org,327.632.3906x538,"78918 Walker Garden Apt. 348, East Timothy, NM 59889" -Kelsey Ward,scottdaniel@davis.com,5153897738,"4403 Clark Island Apt. 444, West Krystalfurt, FL 94214" -Sandra Gomez,matthew20@gmail.com,728.530.0363x69972,"20251 Patricia Mountain, Watsonland, MS 38546" diff --git a/data/raw/f_732_data_simon/test_file2.csv b/data/raw/f_732_data_simon/test_file2.csv deleted file mode 100644 index 9fde0110..00000000 --- a/data/raw/f_732_data_simon/test_file2.csv +++ /dev/null @@ -1,12 +0,0 @@ -Name,Email,Phone,Address -Michael Fields,elizabethross@gmail.com,000.514.1690,"903 Hensley Village Apt. 601, Tuckerchester, DE 31252" -Allison Werner,heather90@hotmail.com,+1-868-202-3593,"5635 Payne Glen Apt. 811, Port Melissabury, NE 71023" -Lori Bowman,taylorlarry@gmail.com,+1-050-073-0894,"947 Goodman Circle, Karenburgh, NH 09584" -William Carpenter,radams@arnold.com,001-206-838-8971x344,"317 Brown Throughway Suite 331, Port Kimborough, AL 62433" -Martin Robinson,william96@hotmail.com,+1-698-314-4469x3114,"USNV Roach, FPO AA 57563" -Brenda Macias,molinaalison@hotmail.com,292.421.9119,"31263 Griffin Branch, North Mariochester, LA 53755" -Dr. Ryan Shaw,rachelfitzgerald@yahoo.com,001-617-165-0076x182,"06263 Karl Fords Apt. 322, Gallegoston, WY 71885" -Emily Pearson,uwatson@gmail.com,651.962.5077x42123,"680 Carter Mission Apt. 700, New Hannah, GA 34058" -Dr. 
Mary Washington MD,wesleyherman@morgan-humphrey.com,(577)273-7027,"15539 Robert Road Suite 218, Burnsside, NM 50761" -Sheila Frye,jlee@jackson.org,(087)237-5276x7372,"6405 Mccarthy Motorway, West Cole, DC 16898" -Benjamin Wood,jgardner@james.com,(163)324-1870,"134 Stokes Squares, Port Jennifer, MO 38009" diff --git a/data/raw/f_732_data_simon/test_file3.csv b/data/raw/f_732_data_simon/test_file3.csv deleted file mode 100644 index bd118736..00000000 --- a/data/raw/f_732_data_simon/test_file3.csv +++ /dev/null @@ -1,15 +0,0 @@ -Name,Email,Phone,Address -Bradley Yang,jeffreykelly@stewart.com,554-103-7842,"069 Michelle Street Suite 611, Owensberg, NC 68063" -Nicholas Meadows,burtonjoann@gmail.com,001-576-361-5408,"4841 Russell Branch, Jameshaven, HI 44475" -Aaron Graves,zavalahector@reed-garza.com,863-629-6049x059,"94836 Phelps Knoll Suite 692, Lake Richardton, NJ 18496" -Heather Knox,christopher65@yahoo.com,(794)817-3854x38809,"06689 Jason Manor Suite 364, South Jeffrey, NH 82878" -Julie Gutierrez,hughesmatthew@yahoo.com,+1-436-144-1901x565,"4088 Ruiz Lakes Suite 535, South Rebecca, TN 43350" -Catherine Parks,brian27@mcclure.com,(085)792-5067x3398,"99004 Kathleen Field Apt. 592, North Jamesland, NH 34587" -Laura Blackburn,lisacarpenter@hotmail.com,496.926.9382,"9526 Patrick Divide Suite 271, Davisfurt, WA 50313" -Olivia Johnson,harperkara@gmail.com,984-429-9699x217,"45457 Mario Parkways, North Georgemouth, NJ 82861" -Michael Spencer,carmen50@yahoo.com,005-890-9692x269,"79918 Schwartz Lake Suite 824, Lake Tonya, AL 79084" -Michael Bowman,shannondonna@robinson.com,610-217-4860,"661 Collins Divide Suite 341, Espinozaview, KY 21217" -Paul Peters,cookejose@carter-davis.com,+1-387-009-2815x77284,"613 Cook Gateway Suite 681, Port Danaborough, IN 53406" -Patrick Moses,kelly30@hotmail.com,001-044-856-6328x6972,"052 Mccullough Path Suite 483, North Matthew, WY 06009" -Jason Simpson,hjames@gmail.com,515-713-2495x9978,"USNS Phillips, FPO AP 36912" -Nicole Howe,lrobinson@hotmail.com,080.539.3069,"45851 Cooper Junction, Carolynbury, ID 81540" diff --git a/data/raw/f_732_data_simon/test_file4.csv b/data/raw/f_732_data_simon/test_file4.csv deleted file mode 100644 index 0a56f64f..00000000 --- a/data/raw/f_732_data_simon/test_file4.csv +++ /dev/null @@ -1,13 +0,0 @@ -Name,Email,Phone,Address -Erika Gonzalez,yatesmegan@solomon.com,590-706-1071,"9518 Kelly Hills, Brownland, OH 49173" -Jason Gregory,wilsonjoshua@yahoo.com,088-310-5171x5237,"9861 Baker Road Apt. 554, Wolfeport, WY 28463" -Ashley Armstrong,royyoung@yahoo.com,(296)655-3969x6321,"37390 Adams Key Suite 576, Lake Bethmouth, MS 44319" -Jade Gonzalez,joseph06@miller.com,7771257685,"6696 Julie Trafficway Apt. 068, Port Georgeview, FL 27184" -Melody Jones,brandon05@gmail.com,001-159-324-3764x89282,"1772 Fernandez Brooks, Jamesfurt, NV 16176" -Thomas Sellers,ythomas@yahoo.com,(469)646-0738,"83078 Blake Branch, Lewisville, GA 93386" -Stephen Butler,scott10@gmail.com,+1-032-201-9177x1816,"546 Casey Highway, Jessicatown, AL 89849" -Michael Wilson,greenashley@yahoo.com,001-030-775-5772,"884 Washington Vista, West Reneefurt, HI 09925" -William Beltran,michellegriffith@james.com,419-201-1359x1867,"6307 Pena Rapid, Port Jasonton, ME 73009" -Timothy Guzman,ronald12@yahoo.com,737.058.1806,"40862 Ramirez Village, Thomasburgh, NE 18354" -Andrew Mccarthy,christianriggs@hotmail.com,001-229-160-7591x6108,"64291 Casey Ford, East Judyborough, AK 02542" -Samantha Ward,matthew35@landry-malone.com,001-389-379-3254x96873,"0523 Khan Freeway Apt. 
555, North Danielle, KY 99558" diff --git a/data/raw/f_732_data_simon/test_file5.csv b/data/raw/f_732_data_simon/test_file5.csv deleted file mode 100644 index cec11991..00000000 --- a/data/raw/f_732_data_simon/test_file5.csv +++ /dev/null @@ -1,19 +0,0 @@ -Name,Email,Phone,Address -Edward Powell,tylerbailey@henry.com,001-832-761-8289x3331,"106 Smith Vista, Lake Nathan, ID 63705" -Megan Roberts,johnsjessica@yahoo.com,+1-446-364-9337,"418 Taylor Ridges, Ryanshire, NJ 75831" -Eric Burnett,maurice75@gmail.com,+1-807-407-9129x74754,"501 Donna Field, West David, NY 43915" -Shawn Mejia,leslie29@yahoo.com,551-477-3235x3186,"63027 Tara Corner Suite 826, New Kristen, SD 56313" -Katrina Gutierrez,josebyrd@harrell.com,857.115.1465x439,"42160 Timothy Via, Michealfort, MD 41080" -Teresa Hamilton,ericbaker@brown.net,+1-404-802-0206x5882,"1706 Nathan Fall, Jenniferfort, UT 13751" -John King,nklein@johnson.com,(356)273-6960x8234,"065 Wood Estate, Port Shane, OH 98757" -Amy Bennett,millereddie@nolan.com,373-767-3018,"5363 Benjamin Square Apt. 003, New James, GA 84413" -Rhonda Hughes,jasonhawkins@hotmail.com,(515)521-5112x308,"984 Bryan Branch Suite 427, Dunnmouth, UT 37790" -Kayla Jones,evelynfletcher@lowe.org,001-986-703-7005x463,"41258 Robert Islands, Kentshire, MS 53385" -Martha Poole,reyesmichelle@lopez.com,001-247-932-4947x55597,"4870 Martha Turnpike Apt. 064, Walkerburgh, MS 92286" -Juan Jones,vmullins@gmail.com,(048)894-0225x0991,"990 James Bridge Apt. 413, Port Hannahhaven, RI 74341" -Kimberly Diaz,xjohnson@thompson-carrillo.com,(058)556-6103x347,"478 Robert Estate Apt. 435, East Ritamouth, MN 68820" -Jessica Washington,sharongeorge@sanders.com,444.240.8607x0538,"587 Tina Rapids Apt. 771, South Colleen, WY 25823" -Thomas Harris,prattalicia@gmail.com,001-145-103-0726x73954,"41234 Jacqueline Parkway Apt. 889, West Kristenchester, MN 04454" -Margaret Gutierrez,pmartin@trujillo-hunter.biz,(076)927-3279x75028,"543 Katherine Stravenue Apt. 467, South Bonniestad, OR 38371" -Christine Good,hendersonkyle@hotmail.com,001-117-849-7733x285,"PSC 7864, Box 6329, APO AA 33122" -John Smith,danieldale@daniels.net,001-440-429-8501x2235,"629 Park Pass, New Susanberg, CA 25099" diff --git a/data/raw/f_732_simon.py b/data/raw/f_732_simon.py deleted file mode 100644 index f6429dde..00000000 --- a/data/raw/f_732_simon.py +++ /dev/null @@ -1,141 +0,0 @@ -import os -import random -import pandas as pd - -def f_732(data_dir, - csv_files=['file1.csv', 'file2.csv', 'file3.csv'], - seed=None): - """ - Randomly select one of the provided csv_files and select a certain number - of records from the file at random. - The selected records are returned in a DataFrame. - The name of the selected csv_file is also returned. - - If the csv_file is empty return an empty DataFrame. - - Parameters: - data_dir (str): The directory where the CSV files are located. - csv_files (list of str): The list of CSV files to choose from. Default is ['file1.csv', 'file2.csv', 'file3.csv']. - seed (int, optional): Seed for random number generation and for sampling from the csv. - - Returns: - tuple: A tuple containing two elements: - - str: The name of the randomly selected file. - - DataFrame: A pandas DataFrame with the selected rows. 
- - Requirements: - - os - - random - - pandas - - Example: - >>> file_name, df = f_732('test_data') - >>> print(file_name) - 'file2.csv' - >>> print(df) - Animal Weight - 0 Cat 1 - 21 Mouse 12 - 15 Elephant 1000 - 2 Tiger 500 - - >>> file_name, df = f_732('data', csv_files=['test1.csv', 'test2.csv'], seed=42) - >>> print(file_name) - 'test1.csv' - >>> print(df) - Name House Salary - 12 Simba mansion 11111 - 231 Dolores mansion 2222 - 135 Elaine shed 93274 - 21 Sophia garden 111 - """ - - random.seed(seed) - - file = csv_files[random.randint(0, len(csv_files)-1)] - file_path = os.path.join(data_dir, file) - - try: - df = pd.read_csv(file_path) - except pd.errors.EmptyDataError: - return file, pd.DataFrame() - - selected_rows = df.sample(n=random.randint(1, len(df)), random_state=seed) - - return file, selected_rows - -import unittest -import pandas as pd -import os - -def run_tests(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - -class TestCases(unittest.TestCase): - - DATA_DIR = os.path.join('f_732_data_simon') - - def test_rng(self): - file_name, df = f_732(data_dir=self.DATA_DIR, csv_files=['test_file1.csv', 'test_file2.csv'], seed=12) - file_name2, df2 = f_732(data_dir=self.DATA_DIR, csv_files=['test_file1.csv', 'test_file2.csv'], seed=12) - - self.assertEqual(file_name, file_name2) - pd.testing.assert_frame_equal(df, df2) - - def test_case_1(self): - # Tests the function with only f_732_data_simon/test_file1.csv and checks if the returned file name matches - # and if the dataframe is not empty. - file_name, df = f_732(data_dir=self.DATA_DIR, csv_files=['test_file1.csv'], seed=12) - self.assertEqual(file_name, 'test_file1.csv') - self.assertIsInstance(df, pd.DataFrame) - self.assertNotEqual(len(df), 0) - csv = pd.read_csv(os.path.join(self.DATA_DIR, 'test_file1.csv')) - self.assertTrue(all(df.isin(csv))) - - def test_case_2(self): - # Similar to test_case_1 but with f_732_data_simon/test_file2.csv. - file_name, df = f_732(data_dir=self.DATA_DIR, csv_files=['test_file2.csv'], seed=1212) - self.assertEqual(file_name, 'test_file2.csv') - self.assertIsInstance(df, pd.DataFrame) - self.assertNotEqual(len(df), 0) - csv = pd.read_csv(os.path.join(self.DATA_DIR, 'test_file2.csv')) - self.assertTrue(all(df.isin(csv))) - - - def test_case_3(self): - # Similar to test_case_1 but with f_732_data_simon/test_file3.csv. - file_name, df = f_732(data_dir=self.DATA_DIR, csv_files=['test_file3.csv'], seed=127) - self.assertEqual(file_name, 'test_file3.csv') - self.assertIsInstance(df, pd.DataFrame) - self.assertNotEqual(len(df), 0) - csv = pd.read_csv(os.path.join(self.DATA_DIR, 'test_file3.csv')) - self.assertTrue(all(df.isin(csv))) - - - def test_case_4(self): - # Tests the function with all CSV files and checks if the returned file name is one of the available files - # and if the dataframe is not empty. - file_name, df = f_732(data_dir=self.DATA_DIR, csv_files=['test_file1.csv', 'test_file2.csv', 'test_file3.csv', 'test_file4.csv', 'test_file5.csv'], seed=152) - self.assertIn(file_name, ['test_file1.csv', 'test_file2.csv', 'test_file3.csv', 'test_file4.csv', 'test_file5.csv']) - self.assertIsInstance(df, pd.DataFrame) - self.assertNotEqual(len(df), 0) - - def test_case_5(self): - # Tests the function with f_732_data_simon/test_file4.csv and f_732_data_simon/test_file5.csv and checks if the returned file name matches - # one of them and if the dataframe is not empty. 
-        file_name, df = f_732(data_dir=self.DATA_DIR, csv_files=['test_file4.csv', 'test_file5.csv'], seed=141)
-        self.assertIn(file_name, ['test_file4.csv', 'test_file5.csv'])
-        self.assertIsInstance(df, pd.DataFrame)
-        self.assertNotEqual(len(df), 0)
-
-    def test_empty_csv(self):
-        file_name, df = f_732(data_dir=self.DATA_DIR, csv_files=['empty.csv'], seed=42)
-        self.assertEqual(file_name, 'empty.csv')
-        self.assertTrue(df.empty)
-
-
-if __name__ == "__main__":
-    run_tests()
\ No newline at end of file
diff --git a/data/raw/f_732_simon_chien_edit.py b/data/raw/f_732_simon_chien_edit.py
new file mode 100644
index 00000000..ef6a289c
--- /dev/null
+++ b/data/raw/f_732_simon_chien_edit.py
@@ -0,0 +1,133 @@
+import os
+import random
+import pandas as pd
+
+
+def f_732(data_dir,
+          csv_files=['file1.csv', 'file2.csv', 'file3.csv'],
+          seed=None):
+    """
+    Randomly select one of the provided csv_files and select a random number
+    of records from that file.
+    The selected records are returned in a DataFrame.
+    The name of the selected csv_file is also returned.
+
+    If the selected csv_file is empty, an empty DataFrame is returned.
+
+    Parameters:
+    data_dir (str): The directory where the CSV files are located.
+    csv_files (list of str): The list of CSV files to choose from. Default is ['file1.csv', 'file2.csv', 'file3.csv'].
+    seed (int, optional): Seed for random number generation and for sampling from the csv.
+
+    Returns:
+    tuple: A tuple containing two elements:
+        - str: The name of the randomly selected file.
+        - DataFrame: A pandas DataFrame with the selected rows.
+
+    Requirements:
+    - os
+    - random
+    - pandas
+
+    Example:
+    >>> file_name, df = f_732('test_data')
+    >>> print(file_name)
+    file2.csv
+    >>> print(df)
+          Animal  Weight
+    0        Cat       1
+    21     Mouse      12
+    15  Elephant    1000
+    2      Tiger     500
+    """
+
+    random.seed(seed)
+
+    file = csv_files[random.randint(0, len(csv_files) - 1)]
+    file_path = os.path.join(data_dir, file)
+
+    try:
+        df = pd.read_csv(file_path)
+    except pd.errors.EmptyDataError:
+        return file, pd.DataFrame()
+
+    selected_rows = df.sample(n=random.randint(1, len(df)), random_state=seed)
+
+    return file, selected_rows
+
+
+import unittest
+import pandas as pd
+import os
+import tempfile
+import shutil
+
+
+def run_tests():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestCases))
+    runner = unittest.TextTestRunner()
+    runner.run(suite)
+
+
+class TestCases(unittest.TestCase):
+    def setUp(self):
+        # Create a temporary directory
+        self.test_dir = tempfile.mkdtemp()
+        self.test_files = [
+            'file1.csv', 'file2.csv', 'file3.csv', 'file4.csv', 'file5.csv', 'empty.csv'
+        ]
+        # Sample data for CSV files
+        data = {
+            'file1.csv': pd.DataFrame({'Name': ['Alice', 'Bob'], 'Age': [25, 30]}),
+            'file2.csv': pd.DataFrame({'Name': ['Chris', 'Dana'], 'Age': [35, 40]}),
+            'file3.csv': pd.DataFrame({'Name': ['Eve', 'Frank'], 'Age': [45, 50]}),
+            'file4.csv': pd.DataFrame({'Name': ['Grace', 'Hank'], 'Age': [55, 60]}),
+            'file5.csv': pd.DataFrame({'Name': ['Ivan', 'Julia'], 'Age': [65, 70]}),
+            'empty.csv': pd.DataFrame()
+        }
+        # Create CSV files in the directory
+        for file_name, df in data.items():
+            df.to_csv(os.path.join(self.test_dir, file_name), index=False)
+
+    def tearDown(self):
+        # Remove the directory after the test
+        shutil.rmtree(self.test_dir)
+
+    def test_random_selection(self):
+        # Testing random selection and ensuring the file chosen and its data are correct
+        file_name, df = f_732(self.test_dir, seed=42)
+        self.assertTrue(file_name in self.test_files)
+        self.assertFalse(df.empty)
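+    # Determinism note (illustrative sketch, not part of the tested module):
+    # f_732 derives both the file choice and the sample size from the seeded
+    # stdlib RNG, and passes the same seed to DataFrame.sample, so a fixed
+    # seed yields the same (file_name, rows) pair on every call, e.g.:
+    #
+    #   random.seed(42)
+    #   n = random.randint(1, len(df))          # sample size from stdlib RNG
+    #   rows = df.sample(n=n, random_state=42)  # rows from pandas' seeded RNG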
+ + def test_specific_file_selection(self): + # Test selecting a specific file and checking contents + file_name, df = f_732(self.test_dir, ['file1.csv'], seed=42) + expected = pd.read_csv(os.path.join(self.test_dir, 'file1.csv')) + # Sample from expected and reset index + expected_sampled = expected.sample(len(df), random_state=42).reset_index(drop=True) + # Reset index of df to ensure indices match + df_reset = df.reset_index(drop=True) + # Assert frame equality + pd.testing.assert_frame_equal(df_reset, expected_sampled) + + def test_empty_file(self): + # Ensure an empty file returns an empty DataFrame + file_name, df = f_732(self.test_dir, ['empty.csv'], seed=42) + self.assertEqual(file_name, 'empty.csv') + self.assertTrue(df.empty) + + def test_multiple_files(self): + # Testing selection from multiple files + file_name, df = f_732(self.test_dir, ['file3.csv', 'file4.csv'], seed=24) + self.assertIn(file_name, ['file3.csv', 'file4.csv']) + self.assertFalse(df.empty) + + def test_no_file_matches(self): + # Testing behavior when no files match the list + with self.assertRaises(FileNotFoundError): + f_732(self.test_dir, ['nonexistent.csv'], seed=42) + + +if __name__ == "__main__": + run_tests() diff --git a/data/raw/f_733_data_simon/mock_data.csv b/data/raw/f_733_data_simon/mock_data.csv deleted file mode 100644 index d66feb79..00000000 --- a/data/raw/f_733_data_simon/mock_data.csv +++ /dev/null @@ -1,101 +0,0 @@ -id,feature_1,feature_2,feature_3,target -278,500.87,22.5,9843.11,95.74 -64,846.32,30.21,8593.27,46.79 -313,281.31,54.24,7359.29,90.33 -654,357.44,33.12,6504.13,68.54 -564,81.74,33.9,731.22,75.66 -427,287.36,78.9,1514.34,45.49 -351,908.72,0.7,1149.85,56.6 -475,814.58,41.84,8118.78,88.76 -339,832.92,73.1,2535.51,56.27 -507,487.19,2.33,3200.7,28.81 -76,674.5,26.41,1942.59,90.15 -416,745.32,14.94,7252.12,29.8 -720,26.74,0.8,2426.53,47.5 -816,37.81,38.36,9977.95,99.74 -272,890.91,94.5,5292.6,50.15 -570,177.4,33.88,9607.34,37.82 -346,782.13,48.79,111.11,50.59 -309,377.16,59.7,1497.96,65.93 -16,759.41,30.68,4018.75,92.36 -736,739.78,1.32,5421.25,18.8 -232,755.12,67.4,5575.34,5.35 -758,584.91,87.35,2016.16,55.8 -651,486.16,25.65,1096.87,2.8 -265,731.57,25.62,3892.54,24.65 -822,636.95,55.53,5899.31,89.31 -535,626.9,44.55,5977.73,17.33 -74,378.79,86.76,5668.36,8.57 -879,950.28,18.69,3337.93,68.86 -616,826.64,20.39,1410.61,78.43 -669,459.89,86.22,9158.23,21.99 -802,427.84,42.86,7750.82,16.96 -411,783.79,39.38,9999.93,98.22 -247,223.12,97.41,727.79,88.86 -789,785.15,93.79,7006.7,52.62 -1,62.37,1.82,5043.35,58.19 -922,294.83,65.54,858.59,71.54 -684,365.2,47.7,2106.8,14.4 -281,573.16,32.18,7800.57,99.22 -956,505.64,98.48,185.91,14.79 -175,212.94,26.44,7753.34,8.45 -551,900.13,14.92,7463.66,52.78 -520,370.75,9.54,6785.28,5.68 -854,144.97,50.3,6257.86,19.28 -647,894.81,93.6,4161.15,38.0 -593,452.17,42.24,1868.85,67.91 -391,295.93,71.5,7267.11,89.86 -191,151.1,6.28,442.47,47.48 -10,411.59,22.47,84.55,2.93 -837,409.79,74.31,4241.42,24.83 -619,951.77,62.3,6218.35,28.88 -706,504.83,53.13,6158.94,69.7 -263,923.39,45.58,9162.5,7.89 -168,215.64,76.29,7599.97,34.51 -416,171.21,95.6,7247.92,11.23 -702,242.4,42.1,5283.95,93.9 -497,443.8,36.63,8879.64,68.89 -577,341.75,0.75,2456.56,38.85 -331,278.75,30.4,7782.2,85.26 -418,535.48,23.67,4242.29,98.5 -459,369.72,2.91,4576.5,28.42 -586,895.85,48.57,3229.1,31.19 -932,217.11,14.86,3216.66,57.56 -550,823.7,58.16,9405.61,78.59 -344,529.13,52.38,3763.45,10.44 -226,110.62,84.69,8573.91,88.35 -65,895.91,46.83,4117.18,33.11 -898,162.38,16.1,7438.19,0.45 
-390,254.93,93.42,6887.81,69.92 -27,520.11,57.5,4737.31,46.78 -454,900.4,26.98,4264.2,35.96 -527,721.15,98.96,9500.87,16.95 -464,989.68,27.35,9645.46,44.51 -1000,24.85,44.7,1241.9,71.97 -81,967.21,89.81,7732.99,42.42 -125,905.3,49.52,2156.14,98.23 -23,672.16,18.94,4835.71,22.35 -182,427.46,80.19,1774.29,85.16 -638,85.2,90.84,3397.57,72.29 -272,802.76,71.96,2702.65,31.61 -797,222.4,37.28,1691.25,79.56 -274,772.81,89.97,724.44,91.82 -374,751.99,47.53,8030.87,69.72 -930,125.96,76.56,2302.5,82.99 -525,739.6,78.19,1537.5,5.31 -348,981.23,81.52,5860.78,11.73 -799,389.9,44.8,2132.9,47.1 -343,250.86,62.82,2349.61,10.19 -855,539.68,45.5,4305.21,49.16 -178,158.13,63.82,3816.44,96.49 -941,379.65,99.12,2571.29,69.8 -850,306.6,40.1,6297.76,93.3 -199,770.1,34.95,7001.87,89.63 -616,385.12,28.73,5961.52,2.54 -485,442.49,93.49,5246.13,70.91 -934,985.54,87.51,4830.3,66.21 -98,480.2,3.54,1791.62,4.98 -835,211.28,38.54,7187.74,63.78 -144,641.56,62.1,6630.13,79.59 -215,762.87,13.73,2647.49,28.55 -989,615.6,29.71,9935.13,85.45 diff --git a/data/raw/f_733_data_simon/mock_data_with_non_numerical.csv b/data/raw/f_733_data_simon/mock_data_with_non_numerical.csv deleted file mode 100644 index 2ffac6d9..00000000 --- a/data/raw/f_733_data_simon/mock_data_with_non_numerical.csv +++ /dev/null @@ -1,11 +0,0 @@ -id,feature_1,feature_2,feature_3,target -220,131.61,68.64,5260.7,property -955,851.73,71.71,6540.8,ground -91,395.68,24.26,5318.52,member -855,375.21,29.74,7172.3,family -98,290.46,2.65,1487.1,court -444,244.69,21.81,4934.2,herself -64,58.85,60.9,2758.16,left -56,772.4,99.33,8353.5,machine -411,391.51,48.26,1278.85,road -370,945.45,81.99,4255.43,true diff --git a/data/raw/f_733_simon.py b/data/raw/f_733_simon.py deleted file mode 100644 index 16c9cf2c..00000000 --- a/data/raw/f_733_simon.py +++ /dev/null @@ -1,116 +0,0 @@ -import pandas as pd -from sklearn.linear_model import LinearRegression -from sklearn.model_selection import train_test_split - -def f_733(csv_file_path, attribute, test_size=0.2, random_state=42): - ''' - Train a linear regression model on a dataset and predict the value of a particular attribute. - This function reads a CSV file to create a pandas DataFrame, separates the data into - training and testing sets, and performs linear regression. It returns the predicted - values for the testing set as well as the trained model. - - Parameters: - csv_file_path (str): The path to the CSV file containing the data set. - attribute (str): The attribute to predict. - test_size (float, optional): Proportion of the dataset to include in the test split. Default is 0.2. - random_state (int, optional): Seed used by the random number generator. Default is 42. - - Returns: - tuple: A tuple containing: - - model (LinearRegression): The trained linear regression model. - - predictions (ndarray): An array of predicted values for the test set. - - Requirements: - - pandas - - sklearn.linear_model - - sklearn.model_selection - - Example: - >>> model, predictions = f_733("/path/to/data.csv", "target") - >>> print(predictions) - [123.45, ..., 126.78] - - >>> model, predictions = f_733("/path/to/test.csv", "target") - >>> print(predictions) - [1.2423, 4.2313, 28.2219, 10.3092] - - Note: The function assumes that the CSV file is correctly formatted and that the specified attribute exists. 
- ''' - df = pd.read_csv(csv_file_path) - X = df.drop(columns=[attribute]) - y = df[attribute] - - X_train, X_test, y_train, y_test = train_test_split( - X, y, test_size=test_size, random_state=random_state - ) - - model = LinearRegression() - model.fit(X_train, y_train) - - predictions = model.predict(X_test) - return model, predictions - - -import unittest -import numpy as np -import os -from sklearn.linear_model import LinearRegression - -class TestCases(unittest.TestCase): - def setUp(self): - # Absolute path to the CSV file (modify this path to where you saved the f_733_data_simon/mock_data.csv file) - self.csv_file_path = os.path.join("f_733_data_simon", "mock_data.csv") # or provide the correct path - - def test_valid_data(self): - # Test the function with valid data and default parameters - model, predictions = f_733(csv_file_path=self.csv_file_path, attribute="target") - self.assertIsInstance(model, LinearRegression) # Check that a model is returned - self.assertIsNotNone(predictions) # Check that predictions are returned - self.assertIsInstance(predictions, np.ndarray) # Predictions should be a numpy array - self.assertTrue(len(predictions) > 0) # There should be at least one prediction - - def test_different_test_size(self): - # Test the function with a different test size - model, predictions = f_733(csv_file_path=self.csv_file_path, attribute="target", test_size=0.1) - self.assertIsInstance(model, LinearRegression) # Check that a model is returned - self.assertIsNotNone(predictions) # Check that predictions are returned - self.assertIsInstance(predictions, np.ndarray) # Predictions should be a numpy array - self.assertTrue(len(predictions) > 0) # There should be at least one prediction - - def test_different_test_size(self): - # check actual prediction s - model, predictions = f_733(csv_file_path=self.csv_file_path, attribute="target", test_size=0.05, random_state=12) - self.assertIsInstance(model, LinearRegression) # Check that a model is returned - self.assertIsNotNone(predictions) # Check that predictions are returned - self.assertIsInstance(predictions, np.ndarray) # Predictions should be a numpy array - exp = [50.79382830317221, 55.43165477762747, 55.243834423268666, 49.64829868690964, 51.81468104159754] - - for i, expected in enumerate(exp): - self.assertAlmostEqual(predictions[i], exp[i], places=2) - - def test_invalid_csv_path(self): - # Test the function with an invalid CSV path - with self.assertRaises(FileNotFoundError): - f_733(csv_file_path="non_existent.csv", attribute="target") - - def test_invalid_attribute(self): - # Test the function with an attribute not present in the CSV - with self.assertRaises(KeyError): - f_733(csv_file_path=self.csv_file_path, attribute="invalid_attribute") - - def test_predicting_non_numerical_data(self): - # If the CSV contains non-numerical data for the target, the function should handle it gracefully - # Note: This requires a separate CSV file with non-numerical target values. 
- with self.assertRaises(ValueError): - path = os.path.join("f_733_data_simon", "mock_data_with_non_numerical.csv") - f_733(csv_file_path=path, attribute="target") - -def run_tests(): - # Function to execute the test cases - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(TestCases)) - runner = unittest.TextTestRunner() - runner.run(suite) - -if __name__ == "__main__": - run_tests() \ No newline at end of file diff --git a/data/raw/f_733_simon_chien_edit.py b/data/raw/f_733_simon_chien_edit.py new file mode 100644 index 00000000..274c0103 --- /dev/null +++ b/data/raw/f_733_simon_chien_edit.py @@ -0,0 +1,122 @@ +import pandas as pd +from sklearn.linear_model import LinearRegression +from sklearn.model_selection import train_test_split + + +def f_733(csv_file_path, attribute, test_size=0.2, random_state=42): + """ + Train a linear regression model on a dataset and predict the value of a particular attribute. + This function reads a CSV file to create a pandas DataFrame, separates the data into + training and testing sets, and performs linear regression. It returns the predicted + values for the testing set as well as the trained model. + + Parameters: + csv_file_path (str): The path to the CSV file containing the data set. + attribute (str): The attribute to predict. + test_size (float, optional): Proportion of the dataset to include in the test split. Default is 0.2. + random_state (int, optional): Seed used by the random number generator. Default is 42. + + Returns: + tuple: A tuple containing: + - model (LinearRegression): The trained linear regression model. + - predictions (ndarray): An array of predicted values for the test set. + + Requirements: + - pandas + - sklearn.linear_model + - sklearn.model_selection + + Note: The function assumes that the CSV file is correctly formatted and that the specified attribute exists. 
+ + Example: + >>> model, predictions = f_733("/path/to/data.csv", "target") + >>> print(predictions) + [123.45, ..., 126.78] + """ + df = pd.read_csv(csv_file_path) + X = df.drop(columns=[attribute]) + y = df[attribute] + + X_train, X_test, y_train, y_test = train_test_split( + X, y, test_size=test_size, random_state=random_state + ) + + model = LinearRegression() + model.fit(X_train, y_train) + + predictions = model.predict(X_test) + return model, predictions + + +import unittest +import numpy as np +import pandas as pd +import tempfile +import os +from sklearn.linear_model import LinearRegression + + +class TestCases(unittest.TestCase): + def setUp(self): + # Create a temporary CSV file to simulate test environments + self.temp_file = tempfile.NamedTemporaryFile(mode='w+', delete=False, suffix='.csv') + self.csv_file_path = self.temp_file.name + self.temp_file.close() # Close the file immediately after creation + + def tearDown(self): + # Remove the temporary file after the test + os.unlink(self.csv_file_path) + + def create_csv(self, data, header=True): + # Utility to create CSV content + df = pd.DataFrame(data) + df.to_csv(self.csv_file_path, index=False, header=header) + + def test_valid_data(self): + # Valid CSV and attribute + data = {'feature1': [1, 2, 3], 'feature2': [4, 5, 6], 'target': [7, 8, 9]} + self.create_csv(data) + model, predictions = f_733(self.csv_file_path, "target") + self.assertIsInstance(model, LinearRegression) + self.assertIsInstance(predictions, np.ndarray) + self.assertEqual(len(predictions), 1) # 20% of 3 is 0.6, rounds to 1 + + def test_different_test_size(self): + # Changing the test size + data = {'feature1': range(10), 'feature2': range(10, 20), 'target': range(20, 30)} + self.create_csv(data) + model, predictions = f_733(self.csv_file_path, "target", test_size=0.3) + self.assertEqual(len(predictions), 3) # 30% of 10 is 3 + + def test_invalid_attribute(self): + # Attribute not present in the CSV + data = {'feature1': [1, 2], 'feature2': [3, 4]} + self.create_csv(data) + with self.assertRaises(KeyError): + f_733(self.csv_file_path, "nonexistent_target") + + def test_csv_with_missing_values(self): + # CSV containing missing values in features + data = {'feature1': [1, np.nan, 3], 'feature2': [4, 5, 6], 'target': [7, 8, 9]} + self.create_csv(data) + with self.assertRaises(ValueError): + f_733(self.csv_file_path, "target") + + def test_predicting_non_numerical_data(self): + # Non-numerical data in target + data = {'feature1': [1, 2, 3], 'feature2': [4, 5, 6], 'target': ['a', 'b', 'c']} + self.create_csv(data) + with self.assertRaises(ValueError): + f_733(self.csv_file_path, "target") + + +def run_tests(): + # Function to execute the test cases + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestCases)) + runner = unittest.TextTestRunner() + runner.run(suite) + + +if __name__ == "__main__": + run_tests()
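
Note on the expected prediction counts asserted in the f_733 tests: for a
float test_size, scikit-learn's train_test_split assigns ceil(n_samples *
test_size) rows to the test set, so 3 rows with test_size=0.2 yield one
prediction and 10 rows with test_size=0.3 yield three. A minimal sketch of
that sizing rule (standalone illustration, not part of the patched files):

    import numpy as np
    from sklearn.model_selection import train_test_split

    X = np.arange(20).reshape(10, 2)
    y = np.arange(10)
    # ceil(10 * 0.3) == 3 rows land in the test set
    _, X_test, _, _ = train_test_split(X, y, test_size=0.3, random_state=42)
    assert len(X_test) == 3

    X_small = np.arange(6).reshape(3, 2)
    y_small = np.arange(3)
    # ceil(3 * 0.2) == 1 row lands in the test set
    _, X_test_small, _, _ = train_test_split(X_small, y_small, test_size=0.2, random_state=42)
    assert len(X_test_small) == 1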