From c535893ff4ca8fd3be958b5f7480850c937288d0 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Sun, 5 Mar 2017 11:24:02 -0800 Subject: [PATCH 01/31] PPA output --- final.csv | 157 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 157 insertions(+) create mode 100644 final.csv diff --git a/final.csv b/final.csv new file mode 100644 index 0000000..336b70f --- /dev/null +++ b/final.csv @@ -0,0 +1,157 @@ +LOCATION BOOKS_AND_REFERENCE,LOCATION BUSINESS,LOCATION COMMUNICATION,LOCATION EDUCATION,LOCATION ENTERTAINMENT,LOCATION FINANCE,LOCATION GAME_ACTION,LOCATION GAME_ADVENTURE,LOCATION GAME_ARCADE,LOCATION GAME_BOARD,LOCATION GAME_CARD,LOCATION GAME_CASINO,LOCATION GAME_CASUAL,LOCATION GAME_EDUCATIONAL,LOCATION GAME_PUZZLE,LOCATION GAME_ROLE_PLAYING,LOCATION GAME_SIMULATION,LOCATION GAME_SPORTS,LOCATION GAME_STRATEGY,LOCATION GAME_TRIVIA,LOCATION GAME_WORD,LOCATION HEALTH_AND_FITNESS,LOCATION LIBRARIES_AND_DEMO,LOCATION LIFESTYLE,LOCATION MEDIA_AND_VIDEO,LOCATION MEDICAL,LOCATION MUSIC_AND_AUDIO,LOCATION NEWS_AND_MAGAZINES,LOCATION PERSONALIZATION,LOCATION PHOTOGRAPHY,LOCATION PRODUCTIVITY,LOCATION SHOPPING,LOCATION SOCIAL,LOCATION SPORTS,LOCATION TOOLS,LOCATION TRANSPORTATION,LOCATION TRAVEL_AND_LOCAL,LOCATION WEATHER,CONTACTS BOOKS_AND_REFERENCE,CONTACTS BUSINESS,CONTACTS COMMUNICATION,CONTACTS EDUCATION,CONTACTS ENTERTAINMENT,CONTACTS FINANCE,CONTACTS GAME_ACTION,CONTACTS GAME_ADVENTURE,CONTACTS GAME_ARCADE,CONTACTS GAME_BOARD,CONTACTS GAME_CARD,CONTACTS GAME_CASINO,CONTACTS GAME_CASUAL,CONTACTS GAME_EDUCATIONAL,CONTACTS GAME_PUZZLE,CONTACTS GAME_ROLE_PLAYING,CONTACTS GAME_SIMULATION,CONTACTS GAME_SPORTS,CONTACTS GAME_STRATEGY,CONTACTS GAME_TRIVIA,CONTACTS GAME_WORD,CONTACTS HEALTH_AND_FITNESS,CONTACTS LIBRARIES_AND_DEMO,CONTACTS LIFESTYLE,CONTACTS MEDIA_AND_VIDEO,CONTACTS MEDICAL,CONTACTS MUSIC_AND_AUDIO,CONTACTS NEWS_AND_MAGAZINES,CONTACTS PERSONALIZATION,CONTACTS PHOTOGRAPHY,CONTACTS PRODUCTIVITY,CONTACTS SHOPPING,CONTACTS SOCIAL,CONTACTS SPORTS,CONTACTS TOOLS,CONTACTS TRANSPORTATION,CONTACTS TRAVEL_AND_LOCAL,CONTACTS WEATHER,SMS BOOKS_AND_REFERENCE,SMS BUSINESS,SMS COMMUNICATION,SMS EDUCATION,SMS ENTERTAINMENT,SMS FINANCE,SMS GAME_ACTION,SMS GAME_ADVENTURE,SMS GAME_ARCADE,SMS GAME_BOARD,SMS GAME_CARD,SMS GAME_CASINO,SMS GAME_CASUAL,SMS GAME_EDUCATIONAL,SMS GAME_PUZZLE,SMS GAME_ROLE_PLAYING,SMS GAME_SIMULATION,SMS GAME_SPORTS,SMS GAME_STRATEGY,SMS GAME_TRIVIA,SMS GAME_WORD,SMS HEALTH_AND_FITNESS,SMS LIBRARIES_AND_DEMO,SMS LIFESTYLE,SMS MEDIA_AND_VIDEO,SMS MEDICAL,SMS MUSIC_AND_AUDIO,SMS NEWS_AND_MAGAZINES,SMS PERSONALIZATION,SMS PHOTOGRAPHY,SMS PRODUCTIVITY,SMS SHOPPING,SMS SOCIAL,SMS SPORTS,SMS TOOLS,SMS TRANSPORTATION,SMS TRAVEL_AND_LOCAL,SMS WEATHER,PHONE BOOKS_AND_REFERENCE,PHONE BUSINESS,PHONE COMMUNICATION,PHONE EDUCATION,PHONE ENTERTAINMENT,PHONE FINANCE,PHONE GAME_ACTION,PHONE GAME_ADVENTURE,PHONE GAME_ARCADE,PHONE GAME_BOARD,PHONE GAME_CARD,PHONE GAME_CASINO,PHONE GAME_CASUAL,PHONE GAME_EDUCATIONAL,PHONE GAME_PUZZLE,PHONE GAME_ROLE_PLAYING,PHONE GAME_SIMULATION,PHONE GAME_SPORTS,PHONE GAME_STRATEGY,PHONE GAME_TRIVIA,PHONE GAME_WORD,PHONE HEALTH_AND_FITNESS,PHONE LIBRARIES_AND_DEMO,PHONE LIFESTYLE,PHONE MEDIA_AND_VIDEO,PHONE MEDICAL,PHONE MUSIC_AND_AUDIO,PHONE NEWS_AND_MAGAZINES,PHONE PERSONALIZATION,PHONE PHOTOGRAPHY,PHONE PRODUCTIVITY,PHONE SHOPPING,PHONE SOCIAL,PHONE SPORTS,PHONE TOOLS,PHONE TRANSPORTATION,PHONE TRAVEL_AND_LOCAL,PHONE WEATHER,CAMERA BOOKS_AND_REFERENCE,CAMERA BUSINESS,CAMERA COMMUNICATION,CAMERA EDUCATION,CAMERA ENTERTAINMENT,CAMERA 
FINANCE,CAMERA GAME_ACTION,CAMERA GAME_ADVENTURE,CAMERA GAME_ARCADE,CAMERA GAME_BOARD,CAMERA GAME_CARD,CAMERA GAME_CASINO,CAMERA GAME_CASUAL,CAMERA GAME_EDUCATIONAL,CAMERA GAME_PUZZLE,CAMERA GAME_ROLE_PLAYING,CAMERA GAME_SIMULATION,CAMERA GAME_SPORTS,CAMERA GAME_STRATEGY,CAMERA GAME_TRIVIA,CAMERA GAME_WORD,CAMERA HEALTH_AND_FITNESS,CAMERA LIBRARIES_AND_DEMO,CAMERA LIFESTYLE,CAMERA MEDIA_AND_VIDEO,CAMERA MEDICAL,CAMERA MUSIC_AND_AUDIO,CAMERA NEWS_AND_MAGAZINES,CAMERA PERSONALIZATION,CAMERA PHOTOGRAPHY,CAMERA PRODUCTIVITY,CAMERA SHOPPING,CAMERA SOCIAL,CAMERA SPORTS,CAMERA TOOLS,CAMERA TRANSPORTATION,CAMERA TRAVEL_AND_LOCAL,CAMERA WEATHER,CALENDAR BOOKS_AND_REFERENCE,CALENDAR BUSINESS,CALENDAR COMMUNICATION,CALENDAR EDUCATION,CALENDAR ENTERTAINMENT,CALENDAR FINANCE,CALENDAR GAME_ACTION,CALENDAR GAME_ADVENTURE,CALENDAR GAME_ARCADE,CALENDAR GAME_BOARD,CALENDAR GAME_CARD,CALENDAR GAME_CASINO,CALENDAR GAME_CASUAL,CALENDAR GAME_EDUCATIONAL,CALENDAR GAME_PUZZLE,CALENDAR GAME_ROLE_PLAYING,CALENDAR GAME_SIMULATION,CALENDAR GAME_SPORTS,CALENDAR GAME_STRATEGY,CALENDAR GAME_TRIVIA,CALENDAR GAME_WORD,CALENDAR HEALTH_AND_FITNESS,CALENDAR LIBRARIES_AND_DEMO,CALENDAR LIFESTYLE,CALENDAR MEDIA_AND_VIDEO,CALENDAR MEDICAL,CALENDAR MUSIC_AND_AUDIO,CALENDAR NEWS_AND_MAGAZINES,CALENDAR PERSONALIZATION,CALENDAR PHOTOGRAPHY,CALENDAR PRODUCTIVITY,CALENDAR SHOPPING,CALENDAR SOCIAL,CALENDAR SPORTS,CALENDAR TOOLS,CALENDAR TRANSPORTATION,CALENDAR TRAVEL_AND_LOCAL,CALENDAR WEATHER,clusters +1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 
+0.0,0.0,-0.5,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5 +0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.2,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2 +1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,-1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2 
+0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,1.0,1.0,-1.0,1.0,0.0,0.5,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 +0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,0.6,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,3 +1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2 +0.0,0.0,1.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.2,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,-0.333333333333,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.333333333333,0.0,1.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7 
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 
+0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,-0.5,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6 +0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 +-1.0,-1.0,0.333333333333,0.0,-1.0,-0.666666666667,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,-1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.333333333333,1.0,0.666666666667,0.0,0.0,1.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.333333333333,0.0,1.0,0.0,1.0,-1.0,-1.0,0.0,0.0,0.0,-1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 
+0.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,-1.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5 +0.0,1.0,1.0,0.0,0.333333333333,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.714285714286,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,4 +0.0,0.0,0.5,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 
+0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,9 +0.0,0.0,1.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,3 
+1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,-1.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.5,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 +0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 
+1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8 +0.0,1.0,0.714285714286,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-0.142857142857,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2 +0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2 
+0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.6,0.0,1.0,0.0,0.0,0.0,0.5,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,9 +0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1 +0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 
+1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.6,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +0.0,0.0,-1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,-1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,5 +0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.333333333333,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 
+0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8 +0.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.6,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.5,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 
+0.0,0.0,-1.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.714285714286,0.0,1.0,1.0,0.0,0.0,0.5,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5 +0.0,0.0,1.0,1.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,-1.0,-1.0,0.0,0.0,0.0,1.0,-0.333333333333,1.0,0.666666666667,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 +0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 +0.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,1.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,6 +-1.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-0.333333333333,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6 
+0.0,0.0,1.0,1.0,1.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.6,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.5,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.6,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.6,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1 +0.0,0.0,0.666666666667,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,1.0,0.0,0.0,-1.0,-1.0,1.0,-1.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 
+0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,-1.0,1.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,-1.0,0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,9 +0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 
+0.0,1.0,-0.333333333333,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,1.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 +0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 +0.0,0.0,-1.0,0.0,-1.0,1.0,0.0,0.0,-1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.6,0.0,0.0,0.0,-1.0,0.0,0.0,-0.333333333333,-0.333333333333,-1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5 
+0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +0.0,1.0,0.777777777778,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.142857142857,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2 +1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 
+0.0,0.0,-0.333333333333,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.666666666667,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,9
+1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3
+0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5
+0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8
+0.0,1.0,0.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,1.0,-0.333333333333,-0.5,0.0,-0.333333333333,0.0,1.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7
+-1.0,-1.0,0.333333333333,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,0.0,1.0,-1.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+1.0,0.0,0.333333333333,0.0,0.333333333333,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.333333333333,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.714285714286,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1
+0.0,1.0,-0.333333333333,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,-1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,4
+0.0,0.0,0.0,1.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-0.333333333333,0.0,1.0,0.0,0.333333333333,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,0.0,-0.333333333333,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-0.333333333333,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3
+0.0,0.0,-0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,-0.333333333333,0.0,1.0,0.0,0.0,0.0,0.5,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7
+0.0,0.0,1.0,0.0,0.333333333333,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.6,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,1.0,0.333333333333,-1.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3
+0.0,0.0,0.333333333333,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2,-1.0,0.5,-1.0,0.0,0.0,0.0,0.0,-1.0,0.5,0.0,-0.5,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,5
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1
+1.0,0.0,1.0,0.0,-1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5
+0.0,0.0,-0.333333333333,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,-0.5,0.0,0.0,-0.333333333333,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,-0.333333333333,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-0.333333333333,0.0,-1.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,7
+0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.5,0.0,1.0,0.0,0.0,1.0,0.0,0.666666666667,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,-1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.333333333333,0.0,0.0,1.0,-0.333333333333,0.333333333333,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1
+-1.0,0.0,0.5,0.0,1.0,-0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,1.0,1.0,1.0,1.0,0.5,1.0,0.0,0.333333333333,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.333333333333,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2
+0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.2,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.5,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.5,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2
+0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.5,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7
+0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+1.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1
+0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3
+0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,-1.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1
+0.0,0.0,0.5,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,-1.0,-1.0,0.6,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.6,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,4
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1
+1.0,0.0,1.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,1.0,1.0,-1.0,-1.0,-1.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.714285714286,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,0.0,-1.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,-1.0,0.0,-1.0,1.0,0.0,-1.0,1.0,0.333333333333,1.0,0.0,0.0,0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+-1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,-1.0,1.0,-1.0,0.0,-0.5,0.0,1.0,0.0,-1.0,0.0,0.714285714286,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.666666666667,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,4
+0.0,0.0,1.0,0.0,0.5,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.428571428571,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1
+0.0,0.0,1.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8
+0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3
+0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,-0.333333333333,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1
+0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+0.0,0.0,1.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1
+0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.6,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1
+0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +0.0,-1.0,1.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.5,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8 +-1.0,-1.0,-1.0,0.0,-1.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,0.0,0.0,-0.6,-1.0,-0.428571428571,0.0,-1.0,-1.0,-0.75,0.0,-1.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,0.0,0.0,-0.6,-1.0,-1.0,0.0,-1.0,-1.0,-0.714285714286,0.0,-1.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,-1.0,0.0,-1.0,-1.0,-1.0,0.0,-1.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2,0.0,0.0,1.0,0.0,-1.0,0.0,-1.0,-1.0,-1.0,0.0,-1.0,-1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,0.0,0.0,-1.0,-1.0,-1.0,0.0,10 
+0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 +0.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.666666666667,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,0.666666666667,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.6,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 
+0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 +0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,1.0,0.333333333333,0.0,0.0,1.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,-1.0,0.0,0.0,0.0,0.0,-1.0,1.0,-1.0,0.0,0.0,0.6,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 
+0.0,0.0,0.333333333333,0.0,-1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,-1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,5 +1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 
+0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7 +0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.6,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-0.5,0.0,0.0,1.0,0.0,0.333333333333,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8 
+0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 +0.0,0.0,-0.333333333333,0.0,0.0,-1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,-1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.2,1.0,0.0,0.0,0.0,-1.0,0.333333333333,0.0,0.0,-0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2,1.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,-0.333333333333,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1 
+0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 +0.0,1.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-1.0,0.0,-1.0,0.0,1.0,1.0,0.0,1.0,0.2,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 +1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 
+0.0,0.0,-0.5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,-1.0,-1.0,0.0,-1.0,0.0,-0.5,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,-1.0,0.0,-0.333333333333,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,1.0,0.0,0.0,0.333333333333,0.0,1.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7 +0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3 +0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,-1.0,0.0,-1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 
+0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,-1.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +0.0,1.0,1.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1 
+1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,1.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2 +1.0,1.0,1.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.5,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.5,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1 +0.0,-1.0,1.0,0.0,0.0,-1.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,1.0,0.0,0.0,-1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,-1.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.5,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.333333333333,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8 
+1.0,1.0,1.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0,0.333333333333,0.0,1.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,3 +0.0,0.0,-1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9 +0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,8 
+0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8 From 9e5ae3bd08b39ec48f8007504d5617552f232bd1 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Sun, 5 Mar 2017 14:28:33 -0500 Subject: [PATCH 02/31] fix --- ml_util.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/ml_util.py b/ml_util.py index 777df9e..f3dfb67 100644 --- a/ml_util.py +++ b/ml_util.py @@ -192,17 +192,13 @@ def __init__( self, dataset, sensitive=None, target=None): if target is None: target = self.original_data.columns[-1] - self.target_ix = target - if self.target_ix not in self.original_data: raise ValueError("unknown target feature %s" % self.target_ix) if sensitive is None: sensitive = self.original_data.columns[0] - self.sensitive_ix = sensitive - if self.sensitive_ix not in self.original_data: raise ValueError("unkown sensitive feature %s" % self.sensitive_ix) @@ -223,14 +219,14 @@ def __init__( self, dataset, sensitive=None, target=None): if self.target_ix in nominal_cols: targets = len(set(self.original_data[target])) if targets > 2: - print "WARNING: target feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles this correctly" % (target, targets) - del self.sup_ind[self.target_ix] + print "WARNING: target feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (target, targets) + del self.sup_ind[self.target_ix] # self.target_ix = "%s_%s" % (self.target_ix,self.original_data[self.target_ix][0]) if self.sensitive_ix in nominal_cols: targets = len(set(self.original_data[sensitive])) if targets > 2: - print "WARNING: sensitive feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that case correctly" % (sensitive, targets) + print "WARNING: sensitive feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (sensitive, targets) self.sup_ind[self.sensitive_ix] = [self.sensitive_ix] # self.sensitive_ix = "%s_%s" % (self.sensitive_ix,self.original_data[self.sensitive_ix][0]) @@ -241,7 +237,7 @@ def __init__( self, dataset, sensitive=None, target=None): print "target feature = %s" % self.target_ix print "sensitive feature = %s" % self.sensitive_ix - + else: raise ValueError("Unknown dataset %s" % dataset) @@ -268,7 +264,11 @@ def make_super_indices( dataset ): def get_arguments(): parser = argparse.ArgumentParser() parser.add_argument('dataset', help='Name of dataset used') - parser.add_argument('-m', '--measure', default='average-local-inf', help='Quantity of interest') + parser.add_argument('-m', '--measure', + default='average-local-inf', + help='Quantity of interest', + 
choices=['average-local-inf','discrim-inf', + 'general-inf','banzhaf','shapley']) parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field') parser.add_argument('-t', '--target', default=None, help='Target field', type=str) From 93da0bf4f0d8b2393a336be4733769ca8429494f Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Sun, 5 Mar 2017 14:39:39 -0500 Subject: [PATCH 03/31] fix general-inf --- qii.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qii.py b/qii.py index 97a2d7f..f772206 100644 --- a/qii.py +++ b/qii.py @@ -63,7 +63,7 @@ local_influence = numpy.zeros(y_pred.shape[0]) ls = [f_columns.get_loc(f) for f in sup_ind[sf]] for i in xrange(0, iters): - X_inter = random_intervene(numpy.array(X_test), ls) + X_inter = qii.random_intervene(numpy.array(X_test), ls) y_pred_inter = cls.predict(X_inter) local_influence = local_influence + y_pred_inter From b65e4b37416a5355360394f248ea6cf7ee4dbe81 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Tue, 7 Mar 2017 21:28:23 -0500 Subject: [PATCH 04/31] readme and arg reading fixes --- README.md | 23 +++++++++++------------ ml_util.py | 7 ++++--- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index be2f808..5b02eae 100644 --- a/README.md +++ b/README.md @@ -2,21 +2,20 @@ QII Code originally from Datta-Sen-Zick Oakland'16 To try on the adult dataset run: -python qii.py adult --show-plot +`python qii.py adult --show-plot` To see additional options: -Run python qii.py -h +Run `python qii.py -h` Currently supported datasets: -adult : UCI Income dataset -iwpc : Warfarin dosage -nlsy97 : Arrest prediction from the NLSY 97 +* adult : UCI Income dataset +* iwpc : Warfarin dosage +* nlsy97 : Arrest prediction from the NLSY 97 Currently supported measures: -discrim : Unary QII on discrimination -average-unary-individual : Average unary QII -unary-individual : Unary QII on individual outcome (use -i k) for kth individual -general-inf : Influence on average classification -shapley : Shapley QII (use -i k) for kth individual -banzhaf : Banzhaf QII (use -i k) for kth individual - +* discrim : Unary QII on discrimination +* average-unary-individual : Average unary QII +* unary-individual : Unary QII on individual outcome (use -i k for the kth individual) +* general-inf : Influence on average classification +* shapley : Shapley QII (use -i k for the kth individual) +* banzhaf : Banzhaf QII (use -i k for the kth individual) diff --git a/ml_util.py b/ml_util.py index f3dfb67..a930ccf 100644 --- a/ml_util.py +++ b/ml_util.py @@ -265,10 +265,11 @@ def get_arguments(): parser = argparse.ArgumentParser() parser.add_argument('dataset', help='Name of dataset used') parser.add_argument('-m', '--measure', - default='average-local-inf', + default='average-unary-individual', help='Quantity of interest', - choices=['average-local-inf','discrim-inf', + choices=['average-unary-individual','unary-individual', + 'discrim', 'general-inf', + 'banzhaf','shapley']) parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field') parser.add_argument('-t', '--target', default=None, help='Target field', type=str) From be3fdb9700d769fbb32c93802ee476119b32d05a Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Tue, 7 Mar 2017 23:03:23 -0500 Subject: [PATCH 05/31] more command line options and todo notes --- ml_util.py | 24 +++++++++++++++++------- qii.py | 8 ++++++-- qii_lib.py | 2 +- 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/ml_util.py 
b/ml_util.py index a930ccf..3355b43 100644 --- a/ml_util.py +++ b/ml_util.py @@ -278,12 +278,18 @@ def get_arguments(): parser.add_argument('-o', '--output-pdf', action='store_true', help='Output plot as pdf') parser.add_argument('-c', '--classifier', default='logistic', help='Classifier to use', choices=['logistic', 'svm', 'decision-tree', 'decision-forest']) + + parser.add_argument('--max_depth', default=2, help='Max depth for decision trees and forests') + parser.add_argument('--n_estimators', default=20, help='Number of trees for decision forests') + parser.add_argument('--seed', default=0, help='Random seed') + parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report') parser.add_argument('-r', '--record-counterfactuals', action='store_true', help='Store counterfactual pairs for causal analysis') parser.add_argument('-a', '--active-iterations', type=int, default=10, help='Active Learning Iterations') return parser.parse_args() -def split_and_train_classifier(classifier, dataset, scaler=None): +def split_and_train_classifier(args, dataset, scaler=None): + classifier = args.classifier ## Split data into training and test data X_train, X_test, y_train, y_test = cross_validation.train_test_split(dataset.num_data, dataset.target, train_size=0.40) @@ -299,25 +305,29 @@ def split_and_train_classifier(classifier, dataset, scaler=None): X_train = pd.DataFrame(scaler.transform(X_train), columns=(dataset.num_data.columns)) X_test = pd.DataFrame(scaler.transform(X_test), columns=(dataset.num_data.columns)) - cls = train_classifier(classifier, X_train, y_train) + cls = train_classifier(args, X_train, y_train) return (cls, scaler, X_train, X_test, y_train, y_test, sens_train, sens_test) -def train_classifier(classifier, X_train, y_train): +def train_classifier(args, X_train, y_train): + classifier = args.classifier #Initialize sklearn classifier model if (classifier == 'logistic'): import sklearn.linear_model as linear_model - cls = linear_model.LogisticRegression() + cls = linear_model.LogisticRegression(random_state=args.seed) elif (classifier == 'svm'): from sklearn import svm - cls = svm.SVC(kernel='linear', cache_size=7000) + cls = svm.SVC(kernel='linear', cache_size=7000, random_state=args.seed) elif (classifier == 'decision-tree'): import sklearn.linear_model as linear_model - cls = tree.DecisionTreeClassifier() + cls = tree.DecisionTreeClassifier(max_depth=args.max_depth, random_state=args.seed) elif (classifier == 'decision-forest'): from sklearn.ensemble import GradientBoostingClassifier - cls = GradientBoostingClassifier(n_estimators=20, learning_rate=1.0, max_depth=2, random_state=0) + cls = GradientBoostingClassifier(n_estimators=args.n_estimators, + learning_rate=1.0, + max_depth=args.max_depth, + random_state=args.seed) #Train sklearn model cls.fit(X_train, y_train) diff --git a/qii.py b/qii.py index 82f8306..82aa605 100644 --- a/qii.py +++ b/qii.py @@ -12,6 +12,10 @@ from sklearn.datasets import load_svmlight_file +### TODO: +### USe the random seed from command line for other things than model training. 
+### - train/test splits +### - iterations in the various qii computations #def main(): @@ -34,7 +38,7 @@ ######### Begin Training Classifier ########## -cls, scaler, X_train, X_test, y_train, y_test, sens_train, sens_test = split_and_train_classifier(args.classifier, dataset) +cls, scaler, X_train, X_test, y_train, y_test, sens_train, sens_test = split_and_train_classifier(args, dataset) print('End Training Classifier') ######### End Training Classifier ########## @@ -57,7 +61,7 @@ if measure == 'unary-individual': print individual - x_individual = scaler.transform(dataset.num_data.ix[individual]) + x_individual = scaler.transform(dataset.num_data.ix[individual].reshape(1,-1)) (average_local_inf, counterfactuals) = qii.unary_individual_influence(dataset, cls, x_individual, X_test) average_local_inf_series = pd.Series(average_local_inf, index = average_local_inf.keys()) if (args.show_plot): diff --git a/qii_lib.py b/qii_lib.py index 7ff7993..59504a0 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -106,7 +106,7 @@ def average_local_influence(dataset, cls, X): @staticmethod def unary_individual_influence(dataset, cls, x_ind, X): - y_pred = cls.predict(x_ind) + y_pred = cls.predict(x_ind.reshape(1,-1)) average_local_inf = {} counterfactuals = {} iters = 1 From 0ce1391e8457e4e6e1987fa5024eb4099088bb28 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Mon, 20 Mar 2017 19:13:32 -0400 Subject: [PATCH 06/31] random seed for more things --- Makefile | 3 +++ ml_util.py | 25 +++++++++++++++++-------- qii_lib.py | 1 + 3 files changed, 21 insertions(+), 8 deletions(-) create mode 100644 Makefile diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..4c441dc --- /dev/null +++ b/Makefile @@ -0,0 +1,3 @@ +clean: + rm -Rf *.pyc + rm -Rf *~ diff --git a/ml_util.py b/ml_util.py index 3355b43..4f8782d 100644 --- a/ml_util.py +++ b/ml_util.py @@ -22,7 +22,6 @@ from os.path import exists - from qii_lib import * @@ -281,17 +280,25 @@ def get_arguments(): parser.add_argument('--max_depth', default=2, help='Max depth for decision trees and forests') parser.add_argument('--n_estimators', default=20, help='Number of trees for decision forests') - parser.add_argument('--seed', default=0, help='Random seed') + parser.add_argument('--seed', default=None, help='Random seed, auto seeded if not specified', type=int) parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report') parser.add_argument('-r', '--record-counterfactuals', action='store_true', help='Store counterfactual pairs for causal analysis') parser.add_argument('-a', '--active-iterations', type=int, default=10, help='Active Learning Iterations') - return parser.parse_args() + + args = parser.parse_args() + if args.seed is not None: + numpy.random.seed([args.seed]) + + return args def split_and_train_classifier(args, dataset, scaler=None): classifier = args.classifier ## Split data into training and test data - X_train, X_test, y_train, y_test = cross_validation.train_test_split(dataset.num_data, dataset.target, train_size=0.40) + X_train, X_test, y_train, y_test = cross_validation.train_test_split( + dataset.num_data, dataset.target, + train_size=0.40, + ) sens_train = dataset.get_sensitive(X_train) sens_test = dataset.get_sensitive(X_test) @@ -315,19 +322,21 @@ def train_classifier(args, X_train, y_train): #Initialize sklearn classifier model if (classifier == 'logistic'): import sklearn.linear_model as linear_model - cls = linear_model.LogisticRegression(random_state=args.seed) + cls = 
linear_model.LogisticRegression() elif (classifier == 'svm'): from sklearn import svm - cls = svm.SVC(kernel='linear', cache_size=7000, random_state=args.seed) + cls = svm.SVC(kernel='linear', cache_size=7000, + ) elif (classifier == 'decision-tree'): import sklearn.linear_model as linear_model - cls = tree.DecisionTreeClassifier(max_depth=args.max_depth, random_state=args.seed) + cls = tree.DecisionTreeClassifier(max_depth=args.max_depth, + ) elif (classifier == 'decision-forest'): from sklearn.ensemble import GradientBoostingClassifier cls = GradientBoostingClassifier(n_estimators=args.n_estimators, learning_rate=1.0, max_depth=args.max_depth, - random_state=args.seed) + ) #Train sklearn model cls.fit(X_train, y_train) diff --git a/qii_lib.py b/qii_lib.py index 59504a0..afaad2d 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -3,6 +3,7 @@ class qii: record_counterfactuals = True + #Constant intervention @staticmethod def intervene( X, features, x0 ): From 7305c4439c917848d104dba9d4ac24fa22614fbb Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Mon, 20 Mar 2017 19:26:53 -0400 Subject: [PATCH 07/31] update notes related to random seed --- qii.py | 26 +++++--------------------- 1 file changed, 5 insertions(+), 21 deletions(-) diff --git a/qii.py b/qii.py index 82aa605..7b95350 100644 --- a/qii.py +++ b/qii.py @@ -13,21 +13,20 @@ from sklearn.datasets import load_svmlight_file ### TODO: -### USe the random seed from command line for other things than model training. + +### Use the random seed from command line for other things than model training. + ### - train/test splits ### - iterations in the various qii computations -#def main(): - +### This might be taken care of by calling numpy.random.seed(), need +### to verify it applies to all used randomized methods. 
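A quick way to check the note above: scikit-learn's train_test_split falls back to numpy's global RandomState when no random_state is passed, and numpy.random.permutation (the basis of the random interventions) always uses it, so a single numpy.random.seed() call at startup should make both the split and the QII iterations repeatable. A minimal sketch of such a check, assuming these are the only two sources of randomness in the pipeline (run_once and the toy data are illustrative, not from the patches):

    # Sketch: confirm that one global numpy seed reproduces both the
    # train/test split and a permutation of the kind random_intervene uses.
    import numpy
    from sklearn.cross_validation import train_test_split

    def run_once(seed):
        numpy.random.seed(seed)
        data = numpy.arange(100).reshape(50, 2)
        train, test = train_test_split(data, train_size=0.4)  # no random_state: uses global RNG
        perm = numpy.random.permutation(50)
        return train, perm

    train_a, perm_a = run_once(0)
    train_b, perm_b = run_once(0)
    assert (train_a == train_b).all() and (perm_a == perm_b).all()

If the assertion holds across runs, seeding the global numpy RNG covers both kinds of randomness the TODO is concerned with.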
args = get_arguments() qii.record_counterfactuals = args.record_counterfactuals #Read dataset dataset = Dataset(args.dataset, sensitive=args.sensitive, target=args.target) -#if (args.erase_sensitive): -# print 'Erasing sensitive' -# dataset.delete_index(args.sensitive) measure = args.measure individual = args.individual @@ -78,13 +77,9 @@ plot_series(banzhaf_series, args, 'Feature', 'QII on Outcomes (Banzhaf)') if measure == 'shapley': - #print individual - row_individual = dataset.num_data.ix[individual].reshape(1,-1) x_individual = scaler.transform(row_individual) - - #print dataset.num_data.ix[individual] shapley, counterfactuals = qii.shapley_influence(dataset, cls, x_individual, X_test) shapley_series = pd.Series(shapley, index = shapley.keys()) @@ -93,14 +88,3 @@ t1 = time.time() print (t1 - t0) - - - - - - - -#if __name__ == '__main__': -# main() - - From 18dc578963c19acafa3c2f068755baca6639f20d Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Sat, 25 Mar 2017 17:58:58 -0400 Subject: [PATCH 08/31] satisfying pylint --- Makefile | 3 ++ ml_util.py | 40 ++++++++++----- qii.py | 148 ++++++++++++++++++++++++++++++----------------------- qii_lib.py | 2 +- 4 files changed, 117 insertions(+), 76 deletions(-) diff --git a/Makefile b/Makefile index 4c441dc..bd8ef16 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,6 @@ +lint: + pylint qii.py + clean: rm -Rf *.pyc rm -Rf *~ diff --git a/ml_util.py b/ml_util.py index 4f8782d..d4a5cfc 100644 --- a/ml_util.py +++ b/ml_util.py @@ -267,8 +267,7 @@ def get_arguments(): default='average-unary-individual', help='Quantity of interest', choices=['average-unary-individual','unary-individual', - 'discrim', 'general-inf', - 'banzhaf','shapley']) + 'discrim', 'banzhaf', 'shapley']) parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field') parser.add_argument('-t', '--target', default=None, help='Target field', type=str) @@ -292,29 +291,46 @@ def get_arguments(): return args +class Setup(argparse.Namespace): + def __init__(self, cls, x_test, y_test, sens_test, **kw): + self.cls = cls + self.x_test = x_test + self.y_test = y_test + self.sens_test = sens_test + #for k in kw: + # self.__setattr__(k, kw[k]) + argparse.Namespace.__init__(self, **kw) + def split_and_train_classifier(args, dataset, scaler=None): classifier = args.classifier ## Split data into training and test data - X_train, X_test, y_train, y_test = cross_validation.train_test_split( + x_train, x_test, y_train, y_test = cross_validation.train_test_split( dataset.num_data, dataset.target, train_size=0.40, ) - sens_train = dataset.get_sensitive(X_train) - sens_test = dataset.get_sensitive(X_test) + sens_train = dataset.get_sensitive(x_train) + sens_test = dataset.get_sensitive(x_test) if (scaler == None): #Initialize scaler to normalize training data scaler = preprocessing.StandardScaler() - scaler.fit(X_train) + scaler.fit(x_train) #Normalize all training and test data - X_train = pd.DataFrame(scaler.transform(X_train), columns=(dataset.num_data.columns)) - X_test = pd.DataFrame(scaler.transform(X_test), columns=(dataset.num_data.columns)) - - cls = train_classifier(args, X_train, y_train) - - return (cls, scaler, X_train, X_test, y_train, y_test, sens_train, sens_test) + x_train = pd.DataFrame(scaler.transform(x_train), columns=(dataset.num_data.columns)) + x_test = pd.DataFrame(scaler.transform(x_test), columns=(dataset.num_data.columns)) + + cls = train_classifier(args, x_train, y_train) + + return Setup(cls = cls, + scaler = scaler, + x_train = x_train, + x_test = x_test, 
+ y_train = y_train, + y_test = y_test, + sens_train = sens_train, + sens_test = sens_test) def train_classifier(args, X_train, y_train): diff --git a/qii.py b/qii.py index 7b95350..1cd71d0 100644 --- a/qii.py +++ b/qii.py @@ -1,90 +1,112 @@ -import pandas as pd -import numpy as np -import sklearn as skl +""" QII measurement script + +author: mostly Shayak + +""" + +import time + +import pandas as pd import numpy import numpy.linalg -import sys -import time -from ml_util import * -from qii_lib import * +from ml_util import split_and_train_classifier, get_arguments, \ + Dataset, measure_analytics, \ + plot_series_with_baseline, plot_series +from qii_lib import qii +def __main__(): + args = get_arguments() + qii.record_counterfactuals = args.record_counterfactuals + + #Read dataset + dataset = Dataset(args.dataset, sensitive=args.sensitive, target=args.target) + #Get column names + #f_columns = dataset.num_data.columns + #sup_ind = dataset.sup_ind -from sklearn.datasets import load_svmlight_file + ######### Begin Training Classifier ########## -### TODO: + dat = split_and_train_classifier(args, dataset) -### Use the random seed from command line for other things than model training. + print 'End Training Classifier' + ######### End Training Classifier ########## -### - train/test splits -### - iterations in the various qii computations + measure_analytics(dataset, dat.cls, dat.x_test, dat.y_test, dat.sens_test) -### This might be taken care of by calling numpy.random.seed(), need -### to verify it applies to all used randomized methods. + t_start = time.time() -args = get_arguments() -qii.record_counterfactuals = args.record_counterfactuals + measures = {'discrim': eval_discrim, + 'average-unary-individual': eval_average_unary_individual, + 'unary-individual': eval_unary_individual, + 'banzhaf': eval_banzhaf, + 'shapley': eval_shapley} -#Read dataset -dataset = Dataset(args.dataset, sensitive=args.sensitive, target=args.target) + if args.measure in measures: + measures[args.measure](dataset, args, dat) + else: + raise ValueError("Unknown measure %s" % args.measure) -measure = args.measure -individual = args.individual + t_end = time.time() -#Get column names -f_columns = dataset.num_data.columns -sup_ind = dataset.sup_ind + print t_end - t_start -######### Begin Training Classifier ########## +def eval_discrim(dataset, args, dat): + """ Discrimination metric """ -cls, scaler, X_train, X_test, y_train, y_test, sens_train, sens_test = split_and_train_classifier(args, dataset) -print('End Training Classifier') -######### End Training Classifier ########## + baseline = qii.discrim(numpy.array(dat.x_test), dat.cls, numpy.array(dat.sens_test)) + discrim_inf = qii.discrim_influence(dataset, dat.cls, dat.x_test, dat.sens_test) + discrim_inf_series = pd.Series(discrim_inf, index=discrim_inf.keys()) + if args.show_plot: + plot_series_with_baseline( + discrim_inf_series, args, + 'Feature', 'QII on Group Disparity', + baseline) -measure_analytics(dataset, cls, X_test, y_test, sens_test) +def eval_average_unary_individual(dataset, args, dat): + """ Unary QII averaged over all individuals. 
""" -t0 = time.time() + average_local_inf, _ = qii.average_local_influence( + dataset, dat.cls, dat.x_test) + average_local_inf_series = pd.Series(average_local_inf, + index=average_local_inf.keys()) + if args.show_plot: + plot_series(average_local_inf_series, args, + 'Feature', 'QII on Outcomes') -if measure == 'discrim': - baseline = qii.discrim(numpy.array(X_test), cls, numpy.array(sens_test)) - discrim_inf = qii.discrim_influence(dataset, cls, X_test, sens_test) - discrim_inf_series = pd.Series(discrim_inf, index = discrim_inf.keys()) - if (args.show_plot): - plot_series_with_baseline(discrim_inf_series, args, 'Feature', 'QII on Group Disparity', baseline) +def eval_unary_individual(dataset, args, dat): + """ Unary QII. """ -if measure == 'average-unary-individual': - (average_local_inf, counterfactuals) = qii.average_local_influence(dataset, cls, X_test) - average_local_inf_series = pd.Series(average_local_inf, index = average_local_inf.keys()) - if (args.show_plot): - plot_series(average_local_inf_series, args, 'Feature', 'QII on Outcomes') + x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual].reshape(1, -1)) + average_local_inf, _ = qii.unary_individual_influence( + dataset, dat.cls, x_individual, dat.x_test) + average_local_inf_series = pd.Series( + average_local_inf, index=average_local_inf.keys()) + if args.show_plot: + plot_series(average_local_inf_series, args, + 'Feature', 'QII on Outcomes') -if measure == 'unary-individual': - print individual - x_individual = scaler.transform(dataset.num_data.ix[individual].reshape(1,-1)) - (average_local_inf, counterfactuals) = qii.unary_individual_influence(dataset, cls, x_individual, X_test) - average_local_inf_series = pd.Series(average_local_inf, index = average_local_inf.keys()) - if (args.show_plot): - plot_series(average_local_inf_series, args, 'Feature', 'QII on Outcomes') +def eval_banzhaf(dataset, args, dat): + """ Banzhaf metric. """ -if measure == 'banzhaf': - print individual - x_individual = scaler.transform(dataset.num_data.ix[individual]) - print dataset.num_data.ix[individual] + x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual]) - banzhaf = qii.banzhaf_influence(dataset, cls, x_individual, X_test) - banzhaf_series = pd.Series(banzhaf, index = banzhaf.keys()) - if (args.show_plot): + banzhaf = qii.banzhaf_influence(dataset, dat.cls, x_individual, dat.x_test) + banzhaf_series = pd.Series(banzhaf, index=banzhaf.keys()) + if args.show_plot: plot_series(banzhaf_series, args, 'Feature', 'QII on Outcomes (Banzhaf)') -if measure == 'shapley': - row_individual = dataset.num_data.ix[individual].reshape(1,-1) - - x_individual = scaler.transform(row_individual) +def eval_shapley(dataset, args, dat): + """ Shapley metric. 
""" + + row_individual = dataset.num_data.ix[args.individual].reshape(1, -1) + + x_individual = dat.scaler.transform(row_individual) - shapley, counterfactuals = qii.shapley_influence(dataset, cls, x_individual, X_test) - shapley_series = pd.Series(shapley, index = shapley.keys()) - if (args.show_plot): + shapley, _ = qii.shapley_influence(dataset, dat.cls, x_individual, dat.x_test) + shapley_series = pd.Series(shapley, index=shapley.keys()) + if args.show_plot: plot_series(shapley_series, args, 'Feature', 'QII on Outcomes (Shapley)') -t1 = time.time() -print (t1 - t0) +__main__() diff --git a/qii_lib.py b/qii_lib.py index afaad2d..765807b 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -191,7 +191,7 @@ def v(S, x, X_inter): - + @staticmethod def banzhaf_influence(dataset, cls, x_individual, X_test): p_samples = 600 s_samples = 600 From d653d77d40056132699d67eac664adef3813d975 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Mon, 27 Mar 2017 18:34:59 -0400 Subject: [PATCH 09/31] doing some pylinting --- Makefile | 11 +- ml_util.py | 2 +- qii.py | 17 +- qii_lib.py | 480 ++++++++++++++++++++++++++--------------------------- 4 files changed, 251 insertions(+), 259 deletions(-) diff --git a/Makefile b/Makefile index bd8ef16..347b1eb 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,12 @@ -lint: - pylint qii.py +pylint: *.py + pylint -f parseable -j 4 *.py + +test: + python qii.py -m average-unary-individual final.csv + python qii.py -m unary-individual final.csv + python qii.py -m discrim final.csv + python qii.py -m banzhaf final.csv + python qii.py -m shapley final.csv clean: rm -Rf *.pyc diff --git a/ml_util.py b/ml_util.py index d4a5cfc..faadf4e 100644 --- a/ml_util.py +++ b/ml_util.py @@ -414,7 +414,7 @@ def measure_analytics(dataset, cls, X, y, sens=None): error_rate = numpy.mean((y_pred != y)*1.) print('test error rate: %.3f' % error_rate) - discrim0 = qii.discrim(numpy.array(X), cls, numpy.array(sens)) + discrim0 = discrim(numpy.array(X), cls, numpy.array(sens)) print('Initial Discrimination: %.3f' % discrim0) from scipy.stats.stats import pearsonr diff --git a/qii.py b/qii.py index 1cd71d0..e6075b1 100644 --- a/qii.py +++ b/qii.py @@ -14,11 +14,12 @@ from ml_util import split_and_train_classifier, get_arguments, \ Dataset, measure_analytics, \ plot_series_with_baseline, plot_series -from qii_lib import qii + +import qii_lib def __main__(): args = get_arguments() - qii.record_counterfactuals = args.record_counterfactuals + qii_lib.record_counterfactuals = args.record_counterfactuals #Read dataset dataset = Dataset(args.dataset, sensitive=args.sensitive, target=args.target) @@ -55,8 +56,8 @@ def __main__(): def eval_discrim(dataset, args, dat): """ Discrimination metric """ - baseline = qii.discrim(numpy.array(dat.x_test), dat.cls, numpy.array(dat.sens_test)) - discrim_inf = qii.discrim_influence(dataset, dat.cls, dat.x_test, dat.sens_test) + baseline = qii_lib.discrim(numpy.array(dat.x_test), dat.cls, numpy.array(dat.sens_test)) + discrim_inf = qii_lib.discrim_influence(dataset, dat.cls, dat.x_test, dat.sens_test) discrim_inf_series = pd.Series(discrim_inf, index=discrim_inf.keys()) if args.show_plot: plot_series_with_baseline( @@ -67,7 +68,7 @@ def eval_discrim(dataset, args, dat): def eval_average_unary_individual(dataset, args, dat): """ Unary QII averaged over all individuals. 
""" - average_local_inf, _ = qii.average_local_influence( + average_local_inf, _ = qii_lib.average_local_influence( dataset, dat.cls, dat.x_test) average_local_inf_series = pd.Series(average_local_inf, index=average_local_inf.keys()) @@ -79,7 +80,7 @@ def eval_unary_individual(dataset, args, dat): """ Unary QII. """ x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual].reshape(1, -1)) - average_local_inf, _ = qii.unary_individual_influence( + average_local_inf, _ = qii_lib.unary_individual_influence( dataset, dat.cls, x_individual, dat.x_test) average_local_inf_series = pd.Series( average_local_inf, index=average_local_inf.keys()) @@ -92,7 +93,7 @@ def eval_banzhaf(dataset, args, dat): x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual]) - banzhaf = qii.banzhaf_influence(dataset, dat.cls, x_individual, dat.x_test) + banzhaf = qii_lib.banzhaf_influence(dataset, dat.cls, x_individual, dat.x_test) banzhaf_series = pd.Series(banzhaf, index=banzhaf.keys()) if args.show_plot: plot_series(banzhaf_series, args, 'Feature', 'QII on Outcomes (Banzhaf)') @@ -104,7 +105,7 @@ def eval_shapley(dataset, args, dat): x_individual = dat.scaler.transform(row_individual) - shapley, _ = qii.shapley_influence(dataset, dat.cls, x_individual, dat.x_test) + shapley, _ = qii_lib.shapley_influence(dataset, dat.cls, x_individual, dat.x_test) shapley_series = pd.Series(shapley, index=shapley.keys()) if args.show_plot: plot_series(shapley_series, args, 'Feature', 'QII on Outcomes (Shapley)') diff --git a/qii_lib.py b/qii_lib.py index 765807b..26c8c83 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -1,253 +1,237 @@ +""" Various QII related computations. """ + import pandas as pd import numpy -class qii: - record_counterfactuals = True - - #Constant intervention - @staticmethod - def intervene( X, features, x0 ): - X = numpy.array(X, copy=True) - x0 = x0.T - for f in features: - X[:,f] = x0[f] - return X - - #Causal Measure with a constant intervention - @staticmethod - def causal_measure ( clf, X, ep_state, f, x0 ): - c0 = clf.predict(x0) - X1 = intervene( X, ep_state, x0 ) - p1 = numpy.mean(1.*(clf.predict(X1) == c0)) - - X2 = intervene( X, ep_state + [f], x0 ) - p2 = numpy.mean(1.*(clf.predict(X2) == c0)) - - return p2 - p1 - - #Randomly intervene on a a set of columns of X - @staticmethod - def random_intervene( X, cols ): - n = X.shape[0] - order = numpy.random.permutation(range(n)) - X_int = numpy.array(X) - for c in cols: - X_int[:, c] = X_int[order, c] - return X_int - - #Randomly intervene on a a set of columns of x from X - @staticmethod - def random_intervene_point( X, cols, x0 ): - n = X.shape[0] - order = numpy.random.permutation(range(n)) - X_int = numpy.tile(x0, (n, 1)) - for c in cols: - X_int[:, c] = X[order, c] - return X_int - - - @staticmethod - def discrim (X, cls, sens): - not_sens = 1 - sens - y_pred = cls.predict(X) - discrim = numpy.abs(numpy.dot(y_pred,not_sens)/sum(not_sens) - - numpy.dot(y_pred,sens)/sum(sens)) - return discrim - - @staticmethod - def discrim_ratio (X, cls, sens): - not_sens = 1 - sens - y_pred = cls.predict(X) - sens_rate = numpy.dot(y_pred,sens)/sum(sens) - not_sens_rate = numpy.dot(y_pred,not_sens)/sum(not_sens) - - discrim = not_sens_rate/sens_rate - return discrim - - - - #Measure influence on discrimination - @staticmethod - def discrim_influence(dataset, cls, X_test, sens_test): - discrim_inf = {} - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - for sf in sup_ind: - ls = [f_columns.get_loc(f) for f in sup_ind[sf]] - 
X_inter = qii.random_intervene(numpy.array(X_test), ls)
-            discrim_inter = qii.discrim(X_inter, cls, numpy.array(sens_test))
-            discrim_inf[sf] = discrim_inter
-            print('Discrimination %s: %.3f' % (sf, discrim_inf[sf]))
-        return discrim_inf
-
-    @staticmethod
-    def average_local_influence(dataset, cls, X):
-        average_local_inf = {}
-        counterfactuals = {}
-        iters = 10
-        f_columns = dataset.num_data.columns
-        sup_ind = dataset.sup_ind
-        y_pred = cls.predict(X)
-        for sf in sup_ind:
-            local_influence = numpy.zeros(y_pred.shape[0])
-            if qii.record_counterfactuals:
-                counterfactuals[sf] = (numpy.tile(X, (iters,1)), numpy.tile(X, (iters,1)))
-            ls = [f_columns.get_loc(f) for f in sup_ind[sf]]
-            for i in xrange(0, iters):
-                X_inter = qii.random_intervene(numpy.array(X), ls)
-                y_pred_inter = cls.predict(X_inter)
-                local_influence = local_influence + (y_pred == y_pred_inter)*1.
-                if qii.record_counterfactuals:
-                    n = X_inter.shape[0]
-                    counterfactuals[sf][1][i*n:(i+1)*n]=X_inter
-
-            average_local_inf[sf] = 1 - (local_influence/iters).mean()
-            #print('Influence %s: %.3f' % (sf, average_local_inf[sf]))
-        return (average_local_inf, counterfactuals)
-
-    @staticmethod
-    def unary_individual_influence(dataset, cls, x_ind, X):
-        y_pred = cls.predict(x_ind.reshape(1,-1))
-        average_local_inf = {}
+RECORD_COUNTERFACTUALS = True
+
+def intervene(X, features, x0):
+    """ Constant intervention """
+
+    X = numpy.array(X, copy=True)
+    x0 = x0.T
+    for f in features:
+        X[:, f] = x0[f]
+    return X
+
+def causal_measure(clf, X, ep_state, f, x0):
+    """ Causal Measure with a constant intervention. """
+
+    c0 = clf.predict(x0)
+    X1 = intervene(X, ep_state, x0)
+    p1 = numpy.mean(1.*(clf.predict(X1) == c0))
+
+    X2 = intervene(X, ep_state + [f], x0)
+    p2 = numpy.mean(1.*(clf.predict(X2) == c0))
+
+    return p2 - p1
+
+def random_intervene(X, cols):
+    """ Randomly intervene on a set of columns of X. """
+
+    n = X.shape[0]
+    order = numpy.random.permutation(range(n))
+    X_int = numpy.array(X)
+    for c in cols:
+        X_int[:, c] = X_int[order, c]
+    return X_int
+
+def random_intervene_point(X, cols, x0):
+    """ Randomly intervene on a set of columns of x from X. """
+    n = X.shape[0]
+    order = numpy.random.permutation(range(n))
+    X_int = numpy.tile(x0, (n, 1))
+    for c in cols:
+        X_int[:, c] = X[order, c]
+    return X_int
+
+def discrim(X, cls, sens):
+    not_sens = 1 - sens
+    y_pred = cls.predict(X)
+    discrim = numpy.abs(numpy.dot(y_pred, not_sens)/sum(not_sens)
+                        - numpy.dot(y_pred, sens)/sum(sens))
+    return discrim
+
+def discrim_ratio(X, cls, sens):
+    not_sens = 1 - sens
+    y_pred = cls.predict(X)
+    sens_rate = numpy.dot(y_pred, sens)/sum(sens)
+    not_sens_rate = numpy.dot(y_pred, not_sens)/sum(not_sens)
+
+    discrim = not_sens_rate/sens_rate
+    return discrim
+
+def discrim_influence(dataset, cls, X_test, sens_test):
+    """ Measure influence on discrimination.
""" + + discrim_inf = {} + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + for sf in sup_ind: + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + X_inter = random_intervene(numpy.array(X_test), ls) + discrim_inter = discrim(X_inter, cls, numpy.array(sens_test)) + discrim_inf[sf] = discrim_inter + print 'Discrimination %s: %.3f' % (sf, discrim_inf[sf]) + return discrim_inf + +def average_local_influence(dataset, cls, X): + average_local_inf = {} + counterfactuals = {} + iters = 10 + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + y_pred = cls.predict(X) + for sf in sup_ind: + local_influence = numpy.zeros(y_pred.shape[0]) + if RECORD_COUNTERFACTUALS: + counterfactuals[sf] = (numpy.tile(X, (iters, 1)), numpy.tile(X, (iters, 1))) + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + for i in xrange(0, iters): + X_inter = random_intervene(numpy.array(X), ls) + y_pred_inter = cls.predict(X_inter) + local_influence = local_influence + (y_pred == y_pred_inter)*1. + if RECORD_COUNTERFACTUALS: + n = X_inter.shape[0] + counterfactuals[sf][1][i*n:(i+1)*n] = X_inter + + average_local_inf[sf] = 1 - (local_influence/iters).mean() + #print('Influence %s: %.3f' % (sf, average_local_inf[sf])) + return (average_local_inf, counterfactuals) + +def unary_individual_influence(dataset, cls, x_ind, X): + y_pred = cls.predict(x_ind.reshape(1, -1)) + average_local_inf = {} + counterfactuals = {} + iters = 1 + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + for sf in sup_ind: + local_influence = numpy.zeros(y_pred.shape[0]) + if RECORD_COUNTERFACTUALS: + counterfactuals[sf] = (numpy.tile(X, (iters, 1)), numpy.tile(X, (iters, 1))) + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + for i in xrange(0, iters): + X_inter = random_intervene_point(numpy.array(X), ls, x_ind) + y_pred_inter = cls.predict(X_inter) + local_influence = local_influence + (y_pred == y_pred_inter)*1. 
+ if RECORD_COUNTERFACTUALS: + n = X_inter.shape[0] + counterfactuals[sf][1][i*n:(i+1)*n] = X_inter + + average_local_inf[sf] = 1 - (local_influence/iters).mean() + #print('Influence %s: %.3f' % (sf, average_local_inf[sf])) + return (average_local_inf, counterfactuals) + +def shapley_influence(dataset, cls, x_individual, X_test): + p_samples = 600 + s_samples = 600 + + def v(S, x, X_inter): + x_rep = numpy.tile(x, (p_samples, 1)) + for f in S: + x_rep[:, f] = X_inter[:, f] + p = ((cls.predict(x_rep) == y0)*1.).mean() + return (p, x_rep) + + #min_i = numpy.argmin(sum_local_influence) + y0 = cls.predict(x_individual) + print y0 + b = numpy.random.randint(0, X_test.shape[0], p_samples) + X_sample = numpy.array(X_test.ix[b]) + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + super_indices = dataset.sup_ind.keys() + + shapley = dict.fromkeys(super_indices, 0) + if RECORD_COUNTERFACTUALS: + base = numpy.tile(x_individual, (2*p_samples*s_samples, 1)) + #counterfactuals = dict([(sf, (base, numpy.zeros(p_samples*s_samples*2, X_test.shape[1]))) + # for sf in dataset.sup_ind.keys()]) + + counterfactuals = dict([(sf, (base, + numpy.zeros((p_samples*s_samples*2, X_test.shape[1])))) + for sf in dataset.sup_ind.keys()]) + else: counterfactuals = {} - iters = 1 - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - for sf in sup_ind: - local_influence = numpy.zeros(y_pred.shape[0]) - if qii.record_counterfactuals: - counterfactuals[sf] = (numpy.tile(X, (iters,1)), numpy.tile(X, (iters,1))) - ls = [f_columns.get_loc(f) for f in sup_ind[sf]] - for i in xrange(0, iters): - X_inter = qii.random_intervene_point(numpy.array(X), ls, x_ind) - y_pred_inter = cls.predict(X_inter) - local_influence = local_influence + (y_pred == y_pred_inter)*1. 
- if qii.record_counterfactuals: - n = X_inter.shape[0] - counterfactuals[sf][1][i*n:(i+1)*n]=X_inter - - average_local_inf[sf] = 1 - (local_influence/iters).mean() - #print('Influence %s: %.3f' % (sf, average_local_inf[sf])) - return (average_local_inf, counterfactuals) - - - - @staticmethod - def shapley_influence(dataset, cls, x_individual, X_test): - p_samples = 600 - s_samples = 600 - - def v(S, x, X_inter): - x_rep = numpy.tile(x, (p_samples, 1)) - for f in S: - x_rep[:,f] = X_inter[:,f] - p = ((cls.predict(x_rep) == y0)*1.).mean() - return (p, x_rep) - - - #min_i = numpy.argmin(sum_local_influence) - y0 = cls.predict(x_individual) - print y0 - b = numpy.random.randint(0,X_test.shape[0],p_samples) - X_sample = numpy.array(X_test.ix[b]) - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - super_indices = dataset.sup_ind.keys() - - shapley = dict.fromkeys(super_indices, 0) - if (qii.record_counterfactuals): - base = numpy.tile(x_individual, (2*p_samples*s_samples, 1)) - #counterfactuals = dict([(sf, (base, numpy.zeros(p_samples*s_samples*2, X_test.shape[1]))) - # for sf in dataset.sup_ind.keys()]) - counterfactuals = dict([(sf, (base, numpy.zeros((p_samples*s_samples*2, X_test.shape[1])))) - for sf in dataset.sup_ind.keys()]) - else: - counterfactuals = {} - - for sample in xrange(0, s_samples): - perm = numpy.random.permutation(len(super_indices)) - for i in xrange(0, len(super_indices)): - # Choose a random subset and get string indices by flattening - # excluding si - si = super_indices[perm[i]] - S_m_si = sum([sup_ind[super_indices[perm[j]]] for j in xrange(0, i)], []) - #translate into intiger indices - ls_m_si = [f_columns.get_loc(f) for f in S_m_si] - #repeat x_individual_rep - (p_S, X_S) = v(ls_m_si, x_individual, X_sample) - #also intervene on s_i - ls_si = [f_columns.get_loc(f) for f in sup_ind[si]] - (p_S_si, X_S_si) = v(ls_m_si + ls_si, x_individual, X_sample) - shapley[si] = shapley[si] - (p_S_si - p_S)/s_samples - - if (qii.record_counterfactuals): - start_ind = 2*sample*p_samples - mid_ind = (2*sample+1)*p_samples - end_ind = 2*(sample+1)*p_samples - counterfactuals[si][1][start_ind:mid_ind] = X_S - counterfactuals[si][1][mid_ind:end_ind] = X_S_si - - return (shapley, counterfactuals) - - - - @staticmethod - def banzhaf_influence(dataset, cls, x_individual, X_test): - p_samples = 600 - s_samples = 600 - - def v(S, x, X_inter): - x_rep = numpy.tile(x, (p_samples, 1)) - for f in S: - x_rep[:,f] = X_inter[:,f] - p = ((cls.predict(x_rep) == y0)*1.).mean() - return p - - #min_i = numpy.argmin(sum_local_influence) - y0 = cls.predict(x_individual) - b = numpy.random.randint(0,X_test.shape[0],p_samples) - X_sample = numpy.array(X_test.ix[b]) - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - super_indices = dataset.sup_ind.keys() - - banzhaf = dict.fromkeys(super_indices, 0) - - for sample in xrange(0, s_samples): - r = numpy.random.ranf(len(super_indices)) - S = [super_indices[i] for i in xrange(0, len(super_indices)) if r[i] > 0.5] - for si in super_indices: - # Choose a random subset and get string indices by flattening - # excluding si - S_m_si = sum([sup_ind[x] for x in S if x != si], []) - #translate into intiger indices - ls_m_si = [f_columns.get_loc(f) for f in S_m_si] - #repeat x_individual_rep - p_S = v(ls_m_si, x_individual, X_sample) - #also intervene on s_i - ls_si = [f_columns.get_loc(f) for f in sup_ind[si]] - p_S_si = v(ls_m_si + ls_si, x_individual, X_sample) - banzhaf[si] = banzhaf[si] - (p_S - p_S_si)/s_samples - return 
banzhaf
-
-    @staticmethod
-    def analyze_outliers(counterfactuals, out_cls, cls):
-        outlier_fracs = {}
-        new_outlier_fracs = {}
-        qii = {}
-        for sf,pairs in counterfactuals.iteritems():
-            X = pairs[0]
-            X_cf = pairs[1]
-            outs_X = out_cls.predict(X) == -1
-            outs_X_cf = out_cls.predict(X_cf) == -1
-            outlier_fracs[sf] = numpy.mean(outs_X_cf)
-            lnot = numpy.logical_not
-            land = numpy.logical_and
-            old_outlier_frac = numpy.mean(lnot(outs_X))
-            new_outlier_fracs[sf] = numpy.mean(land(lnot(outs_X), outs_X_cf))/old_outlier_frac
-            qii = numpy.mean(cls.predict(X) != cls.predict(X_cf))
-            print('QII %s %.3f' % (sf, qii))
-        return (outlier_fracs, new_outlier_fracs)
-
-
+    for sample in xrange(0, s_samples):
+        perm = numpy.random.permutation(len(super_indices))
+        for i in xrange(0, len(super_indices)):
+            # Choose a random subset and get string indices by flattening
+            # excluding si
+            si = super_indices[perm[i]]
+            S_m_si = sum([sup_ind[super_indices[perm[j]]] for j in xrange(0, i)], [])
+            #translate into integer indices
+            ls_m_si = [f_columns.get_loc(f) for f in S_m_si]
+            #repeat x_individual_rep
+            (p_S, X_S) = v(ls_m_si, x_individual, X_sample)
+            #also intervene on s_i
+            ls_si = [f_columns.get_loc(f) for f in sup_ind[si]]
+            (p_S_si, X_S_si) = v(ls_m_si + ls_si, x_individual, X_sample)
+            shapley[si] = shapley[si] - (p_S_si - p_S)/s_samples
+
+            if RECORD_COUNTERFACTUALS:
+                start_ind = 2*sample*p_samples
+                mid_ind = (2*sample+1)*p_samples
+                end_ind = 2*(sample+1)*p_samples
+                counterfactuals[si][1][start_ind:mid_ind] = X_S
+                counterfactuals[si][1][mid_ind:end_ind] = X_S_si
+
+    return (shapley, counterfactuals)
+
+def banzhaf_influence(dataset, cls, x_individual, X_test):
+    p_samples = 600
+    s_samples = 600
+
+    def v(S, x, X_inter):
+        x_rep = numpy.tile(x, (p_samples, 1))
+        for f in S:
+            x_rep[:, f] = X_inter[:, f]
+        p = ((cls.predict(x_rep) == y0)*1.).mean()
+        return p
+
+    #min_i = numpy.argmin(sum_local_influence)
+    y0 = cls.predict(x_individual)
+    b = numpy.random.randint(0, X_test.shape[0], p_samples)
+    X_sample = numpy.array(X_test.ix[b])
+    f_columns = dataset.num_data.columns
+    sup_ind = dataset.sup_ind
+    super_indices = dataset.sup_ind.keys()
+
+    banzhaf = dict.fromkeys(super_indices, 0)
+
+    for sample in xrange(0, s_samples):
+        r = numpy.random.ranf(len(super_indices))
+        S = [super_indices[i] for i in xrange(0, len(super_indices)) if r[i] > 0.5]
+        for si in super_indices:
+            # Choose a random subset and get string indices by flattening
+            # excluding si
+            S_m_si = sum([sup_ind[x] for x in S if x != si], [])
+            #translate into integer indices
+            ls_m_si = [f_columns.get_loc(f) for f in S_m_si]
+            #repeat x_individual_rep
+            p_S = v(ls_m_si, x_individual, X_sample)
+            #also intervene on s_i
+            ls_si = [f_columns.get_loc(f) for f in sup_ind[si]]
+            p_S_si = v(ls_m_si + ls_si, x_individual, X_sample)
+            banzhaf[si] = banzhaf[si] - (p_S - p_S_si)/s_samples
+    return banzhaf
+
+def analyze_outliers(counterfactuals, out_cls, cls):
+    outlier_fracs = {}
+    new_outlier_fracs = {}
+    qii = {}
+    for sf, pairs in counterfactuals.iteritems():
+        X = pairs[0]
+        X_cf = pairs[1]
+        outs_X = out_cls.predict(X) == -1
+        outs_X_cf = out_cls.predict(X_cf) == -1
+        outlier_fracs[sf] = numpy.mean(outs_X_cf)
+        lnot = numpy.logical_not
+        land = numpy.logical_and
+        old_outlier_frac = numpy.mean(lnot(outs_X))
+        new_outlier_fracs[sf] = numpy.mean(land(lnot(outs_X), outs_X_cf))/old_outlier_frac
+        qii = numpy.mean(cls.predict(X) != cls.predict(X_cf))
+        print 'QII %s %.3f' % (sf, qii)
+    return (outlier_fracs, new_outlier_fracs)
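A quick way to sanity-check the module-level estimators above is to run random_intervene against a stub model and compare a feature the model uses with one it ignores. The snippet below is an illustrative sketch only; StubCls and the synthetic data are assumptions, not part of the patch set, and it presumes qii_lib as it stands after this patch.

    import numpy
    from qii_lib import random_intervene

    class StubCls(object):
        """ Hypothetical classifier: thresholds on column 0, ignores column 1. """
        def predict(self, X):
            return (X[:, 0] > 0.5) * 1

    cls = StubCls()
    X = numpy.random.ranf((1000, 2))
    y_pred = cls.predict(X)
    for col in [0, 1]:
        X_int = random_intervene(X, [col])   # permute a single column
        flips = 1. - numpy.mean(cls.predict(X_int) == y_pred)
        print 'unary influence of column %d: %.3f' % (col, flips)
    # column 0 comes out near 0.5; column 1 at exactly 0.0

From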
252ec803b1c90c044416a4b22f52716e2cda55b4 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Wed, 29 Mar 2017 16:29:40 -0400 Subject: [PATCH 10/31] adding shapley test case --- Makefile | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/Makefile b/Makefile index 347b1eb..9a8c916 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,16 @@ +OS := $(shell uname) +ifeq ($(OS), Darwin) + TIME := time -l +else + TIME := time -v +endif + pylint: *.py pylint -f parseable -j 4 *.py +test-shapley: + $(TIME) python qii.py -m shapley final.csv + test: python qii.py -m average-unary-individual final.csv python qii.py -m unary-individual final.csv From 207bc3f9c92c5aa79d87757abeadec6dd70baf27 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Wed, 29 Mar 2017 16:37:14 -0400 Subject: [PATCH 11/31] time commands fix --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 9a8c916..7c1d8e0 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,8 @@ OS := $(shell uname) ifeq ($(OS), Darwin) - TIME := time -l + TIME := /usr/bin/time -l else - TIME := time -v + TIME := /usr/bin/time -v endif pylint: *.py From 75e5c98056786360c6453041178764d5ceca23e9 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Wed, 29 Mar 2017 17:40:13 -0400 Subject: [PATCH 12/31] forgot something --- Makefile | 10 +++++----- ml_util.py | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Makefile b/Makefile index 7c1d8e0..7c9c61b 100644 --- a/Makefile +++ b/Makefile @@ -12,11 +12,11 @@ test-shapley: $(TIME) python qii.py -m shapley final.csv test: - python qii.py -m average-unary-individual final.csv - python qii.py -m unary-individual final.csv - python qii.py -m discrim final.csv - python qii.py -m banzhaf final.csv - python qii.py -m shapley final.csv + $(TIME) python qii.py -m average-unary-individual final.csv + $(TIME) python qii.py -m unary-individual final.csv + $(TIME) python qii.py -m discrim final.csv + $(TIME) python qii.py -m banzhaf final.csv + $(TIME) python qii.py -m shapley final.csv clean: rm -Rf *.pyc diff --git a/ml_util.py b/ml_util.py index faadf4e..758ab1e 100644 --- a/ml_util.py +++ b/ml_util.py @@ -277,9 +277,9 @@ def get_arguments(): parser.add_argument('-c', '--classifier', default='logistic', help='Classifier to use', choices=['logistic', 'svm', 'decision-tree', 'decision-forest']) - parser.add_argument('--max_depth', default=2, help='Max depth for decision trees and forests') - parser.add_argument('--n_estimators', default=20, help='Number of trees for decision forests') - parser.add_argument('--seed', default=None, help='Random seed, auto seeded if not specified', type=int) + parser.add_argument('--max_depth', type=int, default=2, help='Max depth for decision trees and forests') + parser.add_argument('--n_estimators', type=int, default=20, help='Number of trees for decision forests') + parser.add_argument('--seed', type=int, default=None, help='Random seed, auto seeded if not specified', type=int) parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report') parser.add_argument('-r', '--record-counterfactuals', action='store_true', help='Store counterfactual pairs for causal analysis') From 853b3438ddfd4d17e0762444afc317fe45a274e3 Mon Sep 17 00:00:00 2001 From: Piotr Mardziel Date: Wed, 29 Mar 2017 17:55:49 -0400 Subject: [PATCH 13/31] fix things --- Makefile | 2 +- ml_util.py | 2 +- qii_lib.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile 
index 7c9c61b..40eec09 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ pylint: *.py pylint -f parseable -j 4 *.py test-shapley: - $(TIME) python qii.py -m shapley final.csv + $(TIME) python qii.py -m shapley final.csv --show test: $(TIME) python qii.py -m average-unary-individual final.csv diff --git a/ml_util.py b/ml_util.py index 758ab1e..bb748cf 100644 --- a/ml_util.py +++ b/ml_util.py @@ -279,7 +279,7 @@ def get_arguments(): parser.add_argument('--max_depth', type=int, default=2, help='Max depth for decision trees and forests') parser.add_argument('--n_estimators', type=int, default=20, help='Number of trees for decision forests') - parser.add_argument('--seed', type=int, default=None, help='Random seed, auto seeded if not specified', type=int) + parser.add_argument('--seed', default=None, help='Random seed, auto seeded if not specified', type=int) parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report') parser.add_argument('-r', '--record-counterfactuals', action='store_true', help='Store counterfactual pairs for causal analysis') diff --git a/qii_lib.py b/qii_lib.py index 26c8c83..85897d5 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -3,7 +3,7 @@ import pandas as pd import numpy -RECORD_COUNTERFACTUALS = True +RECORD_COUNTERFACTUALS = False def intervene(X, features, x0): """ Constant intervention """ From fda878424cb175341b8357fbbc09ff2ce55588b1 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Tue, 3 Oct 2017 22:12:50 -0700 Subject: [PATCH 14/31] fixing plot show and output pdf options --- ml_util.py | 3 ++- qii.py | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/ml_util.py b/ml_util.py index bb748cf..3361ab4 100644 --- a/ml_util.py +++ b/ml_util.py @@ -374,7 +374,8 @@ def plot_series(series, args, xlabel, ylabel): print ('Writing to figure-' + measure + '-' + args.dataset + '-' + args.classifier + '.pdf') pp.savefig(bbox_inches='tight') pp.close() - plt.show() + if (args.show_plot == True): + plt.show() def plot_series_with_baseline(series, args, xlabel, ylabel, baseline): diff --git a/qii.py b/qii.py index e6075b1..3e6e012 100644 --- a/qii.py +++ b/qii.py @@ -72,7 +72,7 @@ def eval_average_unary_individual(dataset, args, dat): dataset, dat.cls, dat.x_test) average_local_inf_series = pd.Series(average_local_inf, index=average_local_inf.keys()) - if args.show_plot: + if args.show_plot or args.output_pdf: plot_series(average_local_inf_series, args, 'Feature', 'QII on Outcomes') @@ -84,7 +84,7 @@ def eval_unary_individual(dataset, args, dat): dataset, dat.cls, x_individual, dat.x_test) average_local_inf_series = pd.Series( average_local_inf, index=average_local_inf.keys()) - if args.show_plot: + if args.show_plot or args.output_pdf: plot_series(average_local_inf_series, args, 'Feature', 'QII on Outcomes') @@ -95,7 +95,7 @@ def eval_banzhaf(dataset, args, dat): banzhaf = qii_lib.banzhaf_influence(dataset, dat.cls, x_individual, dat.x_test) banzhaf_series = pd.Series(banzhaf, index=banzhaf.keys()) - if args.show_plot: + if args.show_plot or args.output_pdf: plot_series(banzhaf_series, args, 'Feature', 'QII on Outcomes (Banzhaf)') def eval_shapley(dataset, args, dat): @@ -107,7 +107,7 @@ def eval_shapley(dataset, args, dat): shapley, _ = qii_lib.shapley_influence(dataset, dat.cls, x_individual, dat.x_test) shapley_series = pd.Series(shapley, index=shapley.keys()) - if args.show_plot: + if args.show_plot or args.output_pdf: plot_series(shapley_series, args, 'Feature', 'QII on Outcomes (Shapley)') 
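# Illustrative usage once this change is in (an editorial sketch, not part of
# the patch): a figure is now only produced on request, e.g.
#   python qii.py -m shapley final.csv -p   -> display the plot window only
#   python qii.py -m shapley final.csv -o   -> write figure-shapley-final.csv-logistic.pdf
# (the PDF name follows plot_series -- measure, dataset and classifier -- with
#  the default logistic classifier assumed)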
__main__() From 5a2002f30e03e96f98ba8c94560fcb50f3dc2565 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Tue, 3 Oct 2017 22:27:31 -0700 Subject: [PATCH 15/31] fixing export_pdf --- ml_util.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ml_util.py b/ml_util.py index 3361ab4..8454ae5 100644 --- a/ml_util.py +++ b/ml_util.py @@ -370,8 +370,8 @@ def plot_series(series, args, xlabel, ylabel): plt.ylabel(ylabel, labelfont) plt.tight_layout() if (args.output_pdf == True): - pp = PdfPages('figure-' + measure + '-' + args.dataset + '-' + args.classifier +'.pdf') - print ('Writing to figure-' + measure + '-' + args.dataset + '-' + args.classifier + '.pdf') + pp = PdfPages('figure-' + args.measure + '-' + args.dataset + '-' + args.classifier +'.pdf') + print ('Writing to figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + '.pdf') pp.savefig(bbox_inches='tight') pp.close() if (args.show_plot == True): @@ -402,8 +402,8 @@ def plot_series_with_baseline(series, args, xlabel, ylabel, baseline): plt.ylabel(ylabel, labelfont) plt.tight_layout() if (args.output_pdf == True): - pp = PdfPages('figure-' + measure + '-' + dataset.name + '-' + dataset.sensitive_ix + '-' + args.classifier + '.pdf') - print ('Writing to figure-' + measure + '-' + dataset.name + '-' + dataset.sensitive_ix + '-' + args.classifier + '.pdf') + pp = PdfPages('figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf') + print ('Writing to figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf') pp.savefig() pp.close() plt.show() From 605482fa160546f8a8bfa77366896ca93cc1da95 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Tue, 3 Oct 2017 22:28:24 -0700 Subject: [PATCH 16/31] rajkiran/malware_specific --- qii.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/qii.py b/qii.py index 3e6e012..fc4e871 100644 --- a/qii.py +++ b/qii.py @@ -72,8 +72,9 @@ def eval_average_unary_individual(dataset, args, dat): dataset, dat.cls, dat.x_test) average_local_inf_series = pd.Series(average_local_inf, index=average_local_inf.keys()) + top_40 = average_local_inf_series.sort_values(ascending=False).head(40) if args.show_plot or args.output_pdf: - plot_series(average_local_inf_series, args, + plot_series(top_40, args, 'Feature', 'QII on Outcomes') def eval_unary_individual(dataset, args, dat): From fd291f1b798d6b3f7fe572b08b90eebd6e0c0717 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Tue, 31 Oct 2017 23:22:09 -0700 Subject: [PATCH 17/31] adding average class influence measure --- ml_util.py | 17 ++++++++++++++--- qii.py | 15 ++++++++++++++- qii_lib.py | 36 ++++++++++++++++++++++++++++++++++++ 3 files changed, 64 insertions(+), 4 deletions(-) diff --git a/ml_util.py b/ml_util.py index 8454ae5..0466f06 100644 --- a/ml_util.py +++ b/ml_util.py @@ -13,7 +13,7 @@ import numpy import numpy.random import arff - +import pdb import numpy.linalg import sys from matplotlib.backends.backend_pdf import PdfPages @@ -267,7 +267,7 @@ def get_arguments(): default='average-unary-individual', help='Quantity of interest', choices=['average-unary-individual','unary-individual', - 'discrim', 'banzhaf', 'shapley']) + 'discrim', 'banzhaf', 'shapley', 'average-unary-class']) parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field') parser.add_argument('-t', '--target', default=None, help='Target field', type=str) @@ -284,6 +284,7 @@ def get_arguments(): parser.add_argument('-i', 
'--individual', default=0, type=int, help='Index for Individualized Transparency Report')
     parser.add_argument('-r', '--record-counterfactuals', action='store_true', help='Store counterfactual pairs for causal analysis')
     parser.add_argument('-a', '--active-iterations', type=int, default=10, help='Active Learning Iterations')
+    parser.add_argument('-q', '--class_influence', default=None, type=int, help='Index of the target class for causal analysis')
 
     args = parser.parse_args()
     if args.seed is not None:
@@ -292,10 +293,11 @@ def get_arguments():
     return args
 
 class Setup(argparse.Namespace):
-    def __init__(self, cls, x_test, y_test, sens_test, **kw):
+    def __init__(self, cls, x_test, y_test, x_target_class, sens_test, **kw):
         self.cls = cls
         self.x_test = x_test
         self.y_test = y_test
+        self.x_target_class = x_target_class
         self.sens_test = sens_test
         #for k in kw:
         #    self.__setattr__(k, kw[k])
@@ -308,6 +310,12 @@ def split_and_train_classifier(args, dataset, scaler=None):
         dataset.num_data, dataset.target,
         train_size=0.40,
     )
+
+    x_target_class = None
+    if args.class_influence is not None:
+        target_class_type = type(y_test.iloc[0])
+        target_class = target_class_type(args.class_influence)
+        x_target_class = x_test[y_test == target_class]
 
     sens_train = dataset.get_sensitive(x_train)
     sens_test = dataset.get_sensitive(x_test)
@@ -320,6 +328,8 @@ def split_and_train_classifier(args, dataset, scaler=None):
     #Normalize all training and test data
     x_train = pd.DataFrame(scaler.transform(x_train), columns=(dataset.num_data.columns))
     x_test = pd.DataFrame(scaler.transform(x_test), columns=(dataset.num_data.columns))
+    if x_target_class is not None:
+        x_target_class = pd.DataFrame(scaler.transform(x_target_class), columns=(dataset.num_data.columns))
 
     cls = train_classifier(args, x_train, y_train)
 
@@ -329,6 +339,7 @@ def split_and_train_classifier(args, dataset, scaler=None):
             x_test = x_test,
             y_train = y_train,
             y_test = y_test,
+            x_target_class = x_target_class,
             sens_train = sens_train,
             sens_test = sens_test)
 
diff --git a/qii.py b/qii.py
index fc4e871..c4e5f56 100644
--- a/qii.py
+++ b/qii.py
@@ -42,7 +42,8 @@ def __main__():
                 'average-unary-individual': eval_average_unary_individual,
                 'unary-individual': eval_unary_individual,
                 'banzhaf': eval_banzhaf,
-                'shapley': eval_shapley}
+                'shapley': eval_shapley,
+                'average-unary-class' : eval_class_average_unary }
 
     if args.measure in measures:
         measures[args.measure](dataset, args, dat)
@@ -111,4 +112,16 @@ def eval_shapley(dataset, args, dat):
     if args.show_plot or args.output_pdf:
         plot_series(shapley_series, args, 'Feature', 'QII on Outcomes (Shapley)')
 
+def eval_class_average_unary(dataset, args, dat):
+    """ Unary QII averaged over all individuals for a particular class """
+
+    average_local_inf, _ = qii_lib.average_local_class_influence(
+        dataset, dat.cls, dat.x_test, dat.x_target_class)
+    average_local_inf_series = pd.Series(average_local_inf,
+                                         index=average_local_inf.keys())
+    top_40 = average_local_inf_series.sort_values(ascending=False).head(40)
+    if args.show_plot or args.output_pdf:
+        plot_series(top_40, args,
+                    'Feature', 'QII on Outcomes')
+
 __main__()
diff --git a/qii_lib.py b/qii_lib.py
index 85897d5..8222b1f 100644
--- a/qii_lib.py
+++ b/qii_lib.py
@@ -36,6 +36,17 @@ def random_intervene(X, cols):
         X_int[:, c] = X_int[order, c]
     return X_int
 
+def random_intervene_class(X, target_class_X, cols):
+    """ Randomly intervene on a set of columns of target_class_X. """
+    n = X.shape[0]
+    p = target_class_X.shape[0]
+    order = numpy.random.choice(n,p)
+    target_int = numpy.array(target_class_X)
+    for c in cols:
+        target_int[:, c] = X[order, c]
+    return target_int
+
+
 def random_intervene_point(X, cols, x0):
     """ Randomly intervene on a set of columns of x from X. """
     n = X.shape[0]
@@ -99,6 +110,31 @@ def average_local_influence(dataset, cls, X):
         #print('Influence %s: %.3f' % (sf, average_local_inf[sf]))
     return (average_local_inf, counterfactuals)
 
+def average_local_class_influence(dataset, cls, X, target_class_X):
+    average_local_inf_class = {}
+    counterfactuals = {}
+    iters = 10
+    f_columns = dataset.num_data.columns
+    sup_ind = dataset.sup_ind
+    y_pred = cls.predict(target_class_X)
+    for sf in sup_ind:
+        local_influence = numpy.zeros(y_pred.shape[0])
+        if RECORD_COUNTERFACTUALS:
+            counterfactuals[sf] = (numpy.tile(target_class_X, (iters, 1)), numpy.tile(target_class_X, (iters, 1)))
+        ls = [f_columns.get_loc(f) for f in sup_ind[sf]]
+        for i in xrange(0, iters):
+            X_inter = random_intervene_class(numpy.array(X), numpy.array(target_class_X), ls)
+            y_pred_inter = cls.predict(X_inter)
+            local_influence = local_influence + (y_pred == y_pred_inter)*1.
+            if RECORD_COUNTERFACTUALS:
+                n = X_inter.shape[0]
+                counterfactuals[sf][1][i*n:(i+1)*n] = X_inter
+
+        average_local_inf_class[sf] = 1 - (local_influence/iters).mean()
+        #print('Influence %s: %.3f' % (sf, average_local_inf_class[sf]))
+    return (average_local_inf_class, counterfactuals)
+
+
 def unary_individual_influence(dataset, cls, x_ind, X):
     y_pred = cls.predict(x_ind.reshape(1, -1))
     average_local_inf = {}

From 37db9132eb4cf18d4287ae64e1909e3dceefbe0f Mon Sep 17 00:00:00 2001
From: Rajkiran
Date: Tue, 31 Oct 2017 23:23:49 -0700
Subject: [PATCH 18/31] removing pdb import

---
 ml_util.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ml_util.py b/ml_util.py
index 0466f06..b1bc248 100644
--- a/ml_util.py
+++ b/ml_util.py
@@ -13,7 +13,6 @@ import numpy
 import numpy.random
 import arff
-import pdb
 import numpy.linalg
 import sys
 from matplotlib.backends.backend_pdf import PdfPages

From db0c016b9b7a7cb4d6b4497777b9279b29f3b203 Mon Sep 17 00:00:00 2001
From: Sophia Kovaleva
Date: Wed, 15 Nov 2017 13:46:59 -0800
Subject: [PATCH 19/31] minor fix

---
 ml_util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ml_util.py b/ml_util.py
index 8454ae5..3b50841 100644
--- a/ml_util.py
+++ b/ml_util.py
@@ -165,7 +165,7 @@ def __init__( self, dataset, sensitive=None, target=None):
         self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis = 1)
         self.sup_ind['Gender'] = ['Gender']
 
-        if sensitive is None:
+        if sensitive is None or senstive == 'Gender':
             self.get_sensitive = (lambda X: X['Gender'])
         elif (sensitive == 'Race'):
             self.get_sensitive = (lambda X: X['Race_"Black"'])

From 57c299882deb89e239384aca3e40eb23bee1c310 Mon Sep 17 00:00:00 2001
From: Sophia Kovaleva
Date: Wed, 15 Nov 2017 13:47:38 -0800
Subject: [PATCH 20/31] minor fix

---
 ml_util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ml_util.py b/ml_util.py
index 3b50841..51c4cda 100644
--- a/ml_util.py
+++ b/ml_util.py
@@ -165,7 +165,7 @@ def __init__( self, dataset, sensitive=None, target=None):
         self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis = 1)
         self.sup_ind['Gender'] = ['Gender']
 
-        if sensitive is None or senstive == 'Gender':
+        if sensitive is None or sensitive == 'Gender':
             self.get_sensitive = (lambda X: X['Gender'])
         elif (sensitive == 'Race'):
             self.get_sensitive = (lambda X: X['Race_"Black"'])
c4ba1d8e06449c879217f1df370904941ca447ec Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Thu, 23 Nov 2017 14:48:17 -0800 Subject: [PATCH 21/31] feature variation plots --- ml_util.py | 727 +++++++++++++++++++++++++++-------------------------- qii.py | 200 +++++++++------ qii_lib.py | 528 +++++++++++++++++++++----------------- 3 files changed, 787 insertions(+), 668 deletions(-) diff --git a/ml_util.py b/ml_util.py index b1bc248..ed357fb 100644 --- a/ml_util.py +++ b/ml_util.py @@ -23,28 +23,33 @@ from qii_lib import * - -#labelfont = {'fontname':'Times New Roman', 'size':15} +# labelfont = {'fontname':'Times New Roman', 'size':15} labelfont = {} -#hfont = {'fontname':'Helvetica'} + + +# hfont = {'fontname':'Helvetica'} def get_column_index(data, cname): - try: - idx = data.columns.get_loc(cname) - except Exception as e: - raise ValueError("Unknown column %s" % cname) - - return idx + try: + idx = data.columns.get_loc(cname) + except Exception as e: + raise ValueError("Unknown column %s" % cname) + + return idx + def encode_nominal(col): - if col.dtype == object: - return LabelEncoder().fit_transform(col) - else: - return col + if col.dtype == object: + return LabelEncoder().fit_transform(col) + else: + return col + import argparse + + class Dataset(object): - """ + """ Class that holds a dataset. Each dataset has its own quirks and needs some special processing to get to the point where we need it to. @@ -70,371 +75,377 @@ class Dataset(object): or the sensitive column from a dataset """ - def __init__( self, dataset, sensitive=None, target=None): - self.name = dataset - - # Warfarin dosage dataset - if (dataset == 'iwpc'): - self.num_data = pd.DataFrame.from_records( - arff.load('data/iwpc/iwpc_train_class.arff'), - columns=[ - 'index', 'race=black', 'race=asian', 'age', 'height', 'weight', 'amiodarone', - 'cyp2c9=13', 'cyp2c9=12', 'cyp2c9=23', 'cyp2c9=33', 'cyp2c9=22', - 'vkorc1=CT', 'vkorc1=TT', 'decr', 'dose' - ]) - self.sup_ind = {} - self.sup_ind['race'] = ['race=black','race=asian'] - self.sup_ind['age'] = ['age'] - self.sup_ind['height'] = ['height'] - self.sup_ind['weight'] = ['weight'] - self.sup_ind['amiodarone'] = ['amiodarone'] - self.sup_ind['cyp2c9'] = ['cyp2c9=13','cyp2c9=12','cyp2c9=23','cyp2c9=33','cyp2c9=22'] - self.sup_ind['vkorc1'] = ['vkorc1=CT','vkorc1=TT'] - self.sup_ind['decr'] = ['decr'] - self.sup_ind['dose'] = ['dose'] - self.target_ix = 'dose' - self.sensitive_ix = 'race=black' - if sensitive is None: - self.get_sensitive = (lambda X: X['race=black']) - - self.target = self.num_data['dose'] - self.num_data = self.num_data.drop(['index'], axis = 1) - self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis = 1) - del self.sup_ind['dose'] - - - #Adult dataset - elif (dataset == 'adult'): - self.original_data = pd.read_csv( - "data/adult/adult.data", - names=[ - "Age", "Workclass", "fnlwgt", "Education", "Education-Num", "Marital Status", - "Occupation", "Relationship", "Race", "Gender", "Capital Gain", "Capital Loss", - "Hours per week", "Country", "Target"], - sep=r'\s*,\s*', - engine='python', - na_values="?") - del self.original_data['fnlwgt'] - self.sup_ind = make_super_indices(self.original_data) - self.num_data = pd.get_dummies(self.original_data) - self.target_ix = 'Target' - self.sensitive_ix = sensitive - - #Define and dedup Target - self.target = self.num_data['Target_>50K'] - self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis = 1) - del self.sup_ind['Target'] - - #Dedup Gender - self.num_data['Gender'] = 
self.num_data['Gender_Male'] - self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis = 1) - self.sup_ind['Gender'] = ['Gender'] - - if sensitive is None: - self.get_sensitive = (lambda X: X['Gender']) - elif (sensitive == ''): - self.get_sensitive = (lambda X: None) - else: - raise ValueError('Cannot handle sensitive '+sensitive+' in dataset '+dataset) - - - #National Longitudinal Survey of Youth 97 - elif (dataset == 'nlsy97'): - self.original_data = pd.read_csv( - "data/nlsy97/20151026/processed_output.csv", - names = ["PUBID.1997", "Gender", "Birth Year", "Census Region", - "Race", "Arrests", "Drug History", "Smoking History"], - sep=r'\s*,\s*', - engine='python', - quoting=2, - na_values="?") - del self.original_data['PUBID.1997'] - self.target_ix = 'Arrests' - self.sensitive_ix = sensitive - self.sup_ind = make_super_indices(self.original_data) - self.num_data = pd.get_dummies(self.original_data) - - #Define and dedup Target - self.target = (self.num_data['Arrests'] > 0)*1. - self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis = 1) - del self.sup_ind[self.target_ix] - - #Dedup Gender - self.num_data['Gender'] = self.num_data['Gender_"Male"'] - self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis = 1) - self.sup_ind['Gender'] = ['Gender'] - - if sensitive is None: - self.get_sensitive = (lambda X: X['Gender']) - elif (sensitive == 'Race'): - self.get_sensitive = (lambda X: X['Race_"Black"']) - else: - raise ValueError('Cannot handle sensitive '+sensitive+' in dataset '+dataset) - - - #German Datset (Incomplete) - elif (dataset == 'german'): - #http://programming-r-pro-bro.blogspot.com/2011/09/modelling-with-r-part-1.html - original_data = pd.read_csv( - "data/german/processed_output.csv", - names = ["PUBID.1997", "Gender", "Birth Year", "Census Region", - "Race", "Arrests", "Drug History", "Smoking History"], - sep=r'\s*,\s*', - engine='python', - na_values="?") - - elif exists(dataset): - print "loading new dataset %s" % dataset - - self.original_data = pd.read_csv(dataset) - - if target is None: - target = self.original_data.columns[-1] - self.target_ix = target - if self.target_ix not in self.original_data: - raise ValueError("unknown target feature %s" % self.target_ix) - - if sensitive is None: - sensitive = self.original_data.columns[0] - self.sensitive_ix = sensitive - if self.sensitive_ix not in self.original_data: - raise ValueError("unkown sensitive feature %s" % self.sensitive_ix) - - if self.sensitive_ix == self.target_ix: - print "WARNING: target and sensitive attributes are the same (%s), I'm unsure whether this tool handles this case correctly" % target - - nominal_cols = set(self.original_data.select_dtypes(include=['object']).columns) - - self.num_data = pd.get_dummies( - self.original_data, - prefix_sep='_', - columns=nominal_cols-set([target,sensitive])) - - self.num_data = self.num_data.apply(encode_nominal) - - self.sup_ind = make_super_indices(self.original_data) - - if self.target_ix in nominal_cols: - targets = len(set(self.original_data[target])) - if targets > 2: - print "WARNING: target feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (target, targets) - del self.sup_ind[self.target_ix] - # self.target_ix = "%s_%s" % (self.target_ix,self.original_data[self.target_ix][0]) - - if self.sensitive_ix in nominal_cols: - targets = len(set(self.original_data[sensitive])) - if targets > 2: - print "WARNING: sensitive feature %s has more than 2 values (it has %d), I'm unsure 
whether this tool handles that correctly" % (sensitive, targets) - self.sup_ind[self.sensitive_ix] = [self.sensitive_ix] - # self.sensitive_ix = "%s_%s" % (self.sensitive_ix,self.original_data[self.sensitive_ix][0]) - - self.target = self.num_data[self.target_ix] - self.num_data = self.num_data.drop([self.target_ix], axis = 1) - - self.get_sensitive = lambda X: X[self.sensitive_ix] - - print "target feature = %s" % self.target_ix - print "sensitive feature = %s" % self.sensitive_ix - - else: - raise ValueError("Unknown dataset %s" % dataset) - - def delete_index ( self, index ): - self.num_data.drop(self.sup_ind[index], axis = 1) - del self.sup_ind[index] - - -#Categorical features are encoded as binary features, one for each category -#A super index keeps track of the mapping between a feature and its binary representation -def make_super_indices( dataset ): - sup_ind = {} - for i in dataset.columns: - if dataset[i].dtype != 'O': - sup_ind[i] = [i] - else: - unique = filter(lambda v: v==v, dataset[i].unique()) - sup_ind[i] = [i + '_' + s for s in unique] - return sup_ind + def __init__(self, dataset, sensitive=None, target=None): + self.name = dataset + + # Warfarin dosage dataset + if (dataset == 'iwpc'): + self.num_data = pd.DataFrame.from_records( + arff.load('data/iwpc/iwpc_train_class.arff'), + columns=[ + 'index', 'race=black', 'race=asian', 'age', 'height', 'weight', 'amiodarone', + 'cyp2c9=13', 'cyp2c9=12', 'cyp2c9=23', 'cyp2c9=33', 'cyp2c9=22', + 'vkorc1=CT', 'vkorc1=TT', 'decr', 'dose' + ]) + self.sup_ind = {} + self.sup_ind['race'] = ['race=black', 'race=asian'] + self.sup_ind['age'] = ['age'] + self.sup_ind['height'] = ['height'] + self.sup_ind['weight'] = ['weight'] + self.sup_ind['amiodarone'] = ['amiodarone'] + self.sup_ind['cyp2c9'] = ['cyp2c9=13', 'cyp2c9=12', 'cyp2c9=23', 'cyp2c9=33', 'cyp2c9=22'] + self.sup_ind['vkorc1'] = ['vkorc1=CT', 'vkorc1=TT'] + self.sup_ind['decr'] = ['decr'] + self.sup_ind['dose'] = ['dose'] + self.target_ix = 'dose' + self.sensitive_ix = 'race=black' + if sensitive is None: + self.get_sensitive = (lambda X: X['race=black']) + + self.target = self.num_data['dose'] + self.num_data = self.num_data.drop(['index'], axis=1) + self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis=1) + del self.sup_ind['dose'] + + + # Adult dataset + elif (dataset == 'adult'): + self.original_data = pd.read_csv( + "data/adult/adult.data", + names=[ + "Age", "Workclass", "fnlwgt", "Education", "Education-Num", "Marital Status", + "Occupation", "Relationship", "Race", "Gender", "Capital Gain", "Capital Loss", + "Hours per week", "Country", "Target"], + sep=r'\s*,\s*', + engine='python', + na_values="?") + del self.original_data['fnlwgt'] + self.sup_ind = make_super_indices(self.original_data) + self.num_data = pd.get_dummies(self.original_data) + self.target_ix = 'Target' + self.sensitive_ix = sensitive + + # Define and dedup Target + self.target = self.num_data['Target_>50K'] + self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis=1) + del self.sup_ind['Target'] + + # Dedup Gender + self.num_data['Gender'] = self.num_data['Gender_Male'] + self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis=1) + self.sup_ind['Gender'] = ['Gender'] + + if sensitive is None: + self.get_sensitive = (lambda X: X['Gender']) + elif (sensitive == ''): + self.get_sensitive = (lambda X: None) + else: + raise ValueError('Cannot handle sensitive ' + sensitive + ' in dataset ' + dataset) + + + # National Longitudinal Survey of Youth 97 + elif (dataset == 
'nlsy97'): + self.original_data = pd.read_csv( + "data/nlsy97/20151026/processed_output.csv", + names=["PUBID.1997", "Gender", "Birth Year", "Census Region", + "Race", "Arrests", "Drug History", "Smoking History"], + sep=r'\s*,\s*', + engine='python', + quoting=2, + na_values="?") + del self.original_data['PUBID.1997'] + self.target_ix = 'Arrests' + self.sensitive_ix = sensitive + self.sup_ind = make_super_indices(self.original_data) + self.num_data = pd.get_dummies(self.original_data) + + # Define and dedup Target + self.target = (self.num_data['Arrests'] > 0) * 1. + self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis=1) + del self.sup_ind[self.target_ix] + + # Dedup Gender + self.num_data['Gender'] = self.num_data['Gender_"Male"'] + self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis=1) + self.sup_ind['Gender'] = ['Gender'] + + if sensitive is None: + self.get_sensitive = (lambda X: X['Gender']) + elif (sensitive == 'Race'): + self.get_sensitive = (lambda X: X['Race_"Black"']) + else: + raise ValueError('Cannot handle sensitive ' + sensitive + ' in dataset ' + dataset) + + + # German Datset (Incomplete) + elif (dataset == 'german'): + # http://programming-r-pro-bro.blogspot.com/2011/09/modelling-with-r-part-1.html + original_data = pd.read_csv( + "data/german/processed_output.csv", + names=["PUBID.1997", "Gender", "Birth Year", "Census Region", + "Race", "Arrests", "Drug History", "Smoking History"], + sep=r'\s*,\s*', + engine='python', + na_values="?") + + elif exists(dataset): + print "loading new dataset %s" % dataset + + self.original_data = pd.read_csv(dataset) + + if target is None: + target = self.original_data.columns[-1] + self.target_ix = target + if self.target_ix not in self.original_data: + raise ValueError("unknown target feature %s" % self.target_ix) + + if sensitive is None: + sensitive = self.original_data.columns[0] + self.sensitive_ix = sensitive + if self.sensitive_ix not in self.original_data: + raise ValueError("unkown sensitive feature %s" % self.sensitive_ix) + + if self.sensitive_ix == self.target_ix: + print "WARNING: target and sensitive attributes are the same (%s), I'm unsure whether this tool handles this case correctly" % target + + nominal_cols = set(self.original_data.select_dtypes(include=['object']).columns) + + self.num_data = pd.get_dummies( + self.original_data, + prefix_sep='_', + columns=nominal_cols - set([target, sensitive])) + + self.num_data = self.num_data.apply(encode_nominal) + + self.sup_ind = make_super_indices(self.original_data) + + if self.target_ix in nominal_cols: + targets = len(set(self.original_data[target])) + if targets > 2: + print "WARNING: target feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % ( + target, targets) + del self.sup_ind[self.target_ix] + # self.target_ix = "%s_%s" % (self.target_ix,self.original_data[self.target_ix][0]) + + if self.sensitive_ix in nominal_cols: + targets = len(set(self.original_data[sensitive])) + if targets > 2: + print "WARNING: sensitive feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % ( + sensitive, targets) + self.sup_ind[self.sensitive_ix] = [self.sensitive_ix] + # self.sensitive_ix = "%s_%s" % (self.sensitive_ix,self.original_data[self.sensitive_ix][0]) + + self.target = self.num_data[self.target_ix] + self.num_data = self.num_data.drop([self.target_ix], axis=1) + + self.get_sensitive = lambda X: X[self.sensitive_ix] + + print "target feature = %s" % 
self.target_ix + print "sensitive feature = %s" % self.sensitive_ix + + else: + raise ValueError("Unknown dataset %s" % dataset) + + def delete_index(self, index): + self.num_data.drop(self.sup_ind[index], axis=1) + del self.sup_ind[index] + + +# Categorical features are encoded as binary features, one for each category +# A super index keeps track of the mapping between a feature and its binary representation +def make_super_indices(dataset): + sup_ind = {} + for i in dataset.columns: + if dataset[i].dtype != 'O': + sup_ind[i] = [i] + else: + unique = filter(lambda v: v == v, dataset[i].unique()) + sup_ind[i] = [i + '_' + s for s in unique] + return sup_ind ## Parse arguments def get_arguments(): - parser = argparse.ArgumentParser() - parser.add_argument('dataset', help='Name of dataset used') - parser.add_argument('-m', '--measure', - default='average-unary-individual', - help='Quantity of interest', - choices=['average-unary-individual','unary-individual', - 'discrim', 'banzhaf', 'shapley', 'average-unary-class']) - parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field') - parser.add_argument('-t', '--target', default=None, help='Target field', type=str) - - parser.add_argument('-e', '--erase-sensitive', action='store_false', help='Erase sensitive field from dataset') - parser.add_argument('-p', '--show-plot', action='store_true', help='Output plot as pdf') - parser.add_argument('-o', '--output-pdf', action='store_true', help='Output plot as pdf') - parser.add_argument('-c', '--classifier', default='logistic', help='Classifier to use', - choices=['logistic', 'svm', 'decision-tree', 'decision-forest']) - - parser.add_argument('--max_depth', type=int, default=2, help='Max depth for decision trees and forests') - parser.add_argument('--n_estimators', type=int, default=20, help='Number of trees for decision forests') - parser.add_argument('--seed', default=None, help='Random seed, auto seeded if not specified', type=int) - - parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report') - parser.add_argument('-r', '--record-counterfactuals', action='store_true', help='Store counterfactual pairs for causal analysis') - parser.add_argument('-a', '--active-iterations', type=int, default=10, help='Active Learning Iterations') - parser.add_argument('-q', '--class_influence', default=None, type=int, help='Index of the the target class for causal analysis') - - args = parser.parse_args() - if args.seed is not None: - numpy.random.seed([args.seed]) - - return args + parser = argparse.ArgumentParser() + parser.add_argument('dataset', help='Name of dataset used') + parser.add_argument('-m', '--measure', + default='average-unary-individual', + help='Quantity of interest', + choices=['average-unary-individual', 'unary-individual', + 'discrim', 'banzhaf', 'shapley', 'average-unary-class']) + parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field') + parser.add_argument('-t', '--target', default=None, help='Target field', type=str) + + parser.add_argument('-e', '--erase-sensitive', action='store_false', help='Erase sensitive field from dataset') + parser.add_argument('-p', '--show-plot', action='store_true', help='Output plot as pdf') + parser.add_argument('-o', '--output-pdf', action='store_true', help='Output plot as pdf') + parser.add_argument('-c', '--classifier', default='logistic', help='Classifier to use', + choices=['logistic', 'svm', 'decision-tree', 'decision-forest']) + + 
parser.add_argument('--max_depth', type=int, default=2, help='Max depth for decision trees and forests') + parser.add_argument('--n_estimators', type=int, default=20, help='Number of trees for decision forests') + parser.add_argument('--seed', default=None, help='Random seed, auto seeded if not specified', type=int) + + parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report') + parser.add_argument('-r', '--record-counterfactuals', action='store_true', + help='Store counterfactual pairs for causal analysis') + parser.add_argument('-a', '--active-iterations', type=int, default=10, help='Active Learning Iterations') + parser.add_argument('-q', '--class_influence', default=None, type=int, + help='Index of the the target class for causal analysis') + + args = parser.parse_args() + if args.seed is not None: + numpy.random.seed([args.seed]) + + return args + class Setup(argparse.Namespace): - def __init__(self, cls, x_test, y_test, x_target_class, sens_test, **kw): - self.cls = cls - self.x_test = x_test - self.y_test = y_test - self.x_target_class = x_target_class - self.sens_test = sens_test - #for k in kw: - # self.__setattr__(k, kw[k]) - argparse.Namespace.__init__(self, **kw) - -def split_and_train_classifier(args, dataset, scaler=None): - classifier = args.classifier - ## Split data into training and test data - x_train, x_test, y_train, y_test = cross_validation.train_test_split( - dataset.num_data, dataset.target, - train_size=0.40, - ) - - x_target_class = None - if args.class_influence is not None: - target_class_type = type(y_test.iloc[0]) - target_class = target_class_type(args.class_influence) - x_target_class = x_test[y_test == target_class] - - sens_train = dataset.get_sensitive(x_train) - sens_test = dataset.get_sensitive(x_test) - - if (scaler == None): - #Initialize scaler to normalize training data - scaler = preprocessing.StandardScaler() - scaler.fit(x_train) - - #Normalize all training and test data - x_train = pd.DataFrame(scaler.transform(x_train), columns=(dataset.num_data.columns)) - x_test = pd.DataFrame(scaler.transform(x_test), columns=(dataset.num_data.columns)) - if x_target_class is not None: - x_target_class = pd.DataFrame(scaler.transform(x_target_class), columns=(dataset.num_data.columns)) - - cls = train_classifier(args, x_train, y_train) - - return Setup(cls = cls, - scaler = scaler, - x_train = x_train, - x_test = x_test, - y_train = y_train, - y_test = y_test, - x_target_class = x_target_class, - sens_train = sens_train, - sens_test = sens_test) + def __init__(self, cls, x_test, y_test, x_target_class, sens_test, **kw): + self.cls = cls + self.x_test = x_test + self.y_test = y_test + self.x_target_class = x_target_class + self.sens_test = sens_test + # for k in kw: + # self.__setattr__(k, kw[k]) + argparse.Namespace.__init__(self, **kw) + + +def split_and_train_classifier(args, dataset, scaler=None, normalize=False): + classifier = args.classifier + ## Split data into training and test data + x_train, x_test, y_train, y_test = cross_validation.train_test_split( + dataset.num_data, dataset.target, + train_size=0.40, + ) + + x_target_class = None + if args.class_influence is not None: + target_class_type = type(y_test.iloc[0]) + target_class = target_class_type(args.class_influence) + x_target_class = x_test[y_test == target_class] + + sens_train = dataset.get_sensitive(x_train) + sens_test = dataset.get_sensitive(x_test) + if normalize: + if (scaler == None): + # Initialize scaler to normalize 
training data + scaler = preprocessing.StandardScaler() + scaler.fit(x_train) + + # Normalize all training and test data + x_train = pd.DataFrame(scaler.transform(x_train), columns=(dataset.num_data.columns)) + x_test = pd.DataFrame(scaler.transform(x_test), columns=(dataset.num_data.columns)) + if x_target_class is not None: + x_target_class = pd.DataFrame(scaler.transform(x_target_class), columns=(dataset.num_data.columns)) + + cls = train_classifier(args, x_train, y_train) + + return Setup(cls=cls, + scaler=scaler, + x_train=x_train, + x_test=x_test, + y_train=y_train, + y_test=y_test, + x_target_class=x_target_class, + sens_train=sens_train, + sens_test=sens_test) def train_classifier(args, X_train, y_train): - classifier = args.classifier - #Initialize sklearn classifier model - if (classifier == 'logistic'): - import sklearn.linear_model as linear_model - cls = linear_model.LogisticRegression() - elif (classifier == 'svm'): - from sklearn import svm - cls = svm.SVC(kernel='linear', cache_size=7000, - ) - elif (classifier == 'decision-tree'): - import sklearn.linear_model as linear_model - cls = tree.DecisionTreeClassifier(max_depth=args.max_depth, - ) - elif (classifier == 'decision-forest'): - from sklearn.ensemble import GradientBoostingClassifier - cls = GradientBoostingClassifier(n_estimators=args.n_estimators, - learning_rate=1.0, - max_depth=args.max_depth, - ) - - #Train sklearn model - cls.fit(X_train, y_train) - return cls - + classifier = args.classifier + # Initialize sklearn classifier model + if (classifier == 'logistic'): + import sklearn.linear_model as linear_model + cls = linear_model.LogisticRegression() + elif (classifier == 'svm'): + from sklearn import svm + cls = svm.SVC(kernel='linear', cache_size=7000, + ) + elif (classifier == 'decision-tree'): + import sklearn.linear_model as linear_model + cls = tree.DecisionTreeClassifier(max_depth=args.max_depth, + ) + elif (classifier == 'decision-forest'): + from sklearn.ensemble import GradientBoostingClassifier + cls = GradientBoostingClassifier(n_estimators=args.n_estimators, + learning_rate=1.0, + max_depth=args.max_depth, + ) + + # Train sklearn model + cls.fit(X_train, y_train) + return cls def plot_series(series, args, xlabel, ylabel): - plt.figure(figsize=(5,4)) - series.sort_values(inplace=True, ascending=False) - #average_local_inf_series.plot(kind="bar", facecolor='#ff9999', edgecolor='white') - series.plot(kind="bar") - plt.xticks(rotation = 45, ha = 'right', size='small') - plt.xlabel(xlabel, labelfont) - plt.ylabel(ylabel, labelfont) - plt.tight_layout() - if (args.output_pdf == True): - pp = PdfPages('figure-' + args.measure + '-' + args.dataset + '-' + args.classifier +'.pdf') - print ('Writing to figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + '.pdf') - pp.savefig(bbox_inches='tight') - pp.close() - if (args.show_plot == True): - plt.show() + plt.figure(figsize=(5, 4)) + series.sort_values(inplace=True, ascending=False) + # average_local_inf_series.plot(kind="bar", facecolor='#ff9999', edgecolor='white') + series.plot(kind="bar") + plt.xticks(rotation=45, ha='right', size='small') + plt.xlabel(xlabel, labelfont) + plt.ylabel(ylabel, labelfont) + plt.tight_layout() + if (args.output_pdf == True): + pp = PdfPages('figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + '.pdf') + print ('Writing to figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + '.pdf') + pp.savefig(bbox_inches='tight') + pp.close() + if (args.show_plot == True): + plt.show() def 
plot_series_with_baseline(series, args, xlabel, ylabel, baseline): - series.sort(ascending = True) - plt.figure(figsize=(5,4)) - #plt.bar(range(series.size), series.as_matrix() - baseline) - #(series - baseline).plot(kind="bar", facecolor='#ff9999', edgecolor='white') - (series - baseline).plot(kind="bar") - #plt.xticks(range(series.size), series.keys(), size='small') - x1,x2,y1,y2 = plt.axis() - X = range(series.size) - for x,y in zip(X,series.as_matrix() - baseline): - x_wd = 1. / series.size - if(y < 0): - plt.text(x+x_wd/2, y-0.01, '%.2f' % (y), ha='center', va= 'bottom', size='small') - else: - plt.text(x+x_wd/2, y+0.01, '%.2f' % (y), ha='center', va= 'top', size='small') - plt.axis((x1,x2,-baseline,y2 + 0.01)) - plt.xticks(rotation = 45, ha = 'right', size='small') - plt.gca().yaxis.set_major_formatter(mtick.FuncFormatter(lambda x,_: '%1.2f' % (x + baseline))) - plt.axhline(linestyle = 'dashed', color = 'black') - plt.text(x_wd, 0, 'Original Discrimination', ha = 'left', va = 'bottom') - plt.xlabel(xlabel, labelfont) - plt.ylabel(ylabel, labelfont) - plt.tight_layout() - if (args.output_pdf == True): - pp = PdfPages('figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf') - print ('Writing to figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf') - pp.savefig() - pp.close() - plt.show() + series.sort(ascending=True) + plt.figure(figsize=(5, 4)) + # plt.bar(range(series.size), series.as_matrix() - baseline) + # (series - baseline).plot(kind="bar", facecolor='#ff9999', edgecolor='white') + (series - baseline).plot(kind="bar") + # plt.xticks(range(series.size), series.keys(), size='small') + x1, x2, y1, y2 = plt.axis() + X = range(series.size) + for x, y in zip(X, series.as_matrix() - baseline): + x_wd = 1. / series.size + if (y < 0): + plt.text(x + x_wd / 2, y - 0.01, '%.2f' % (y), ha='center', va='bottom', size='small') + else: + plt.text(x + x_wd / 2, y + 0.01, '%.2f' % (y), ha='center', va='top', size='small') + plt.axis((x1, x2, -baseline, y2 + 0.01)) + plt.xticks(rotation=45, ha='right', size='small') + plt.gca().yaxis.set_major_formatter(mtick.FuncFormatter(lambda x, _: '%1.2f' % (x + baseline))) + plt.axhline(linestyle='dashed', color='black') + plt.text(x_wd, 0, 'Original Discrimination', ha='left', va='bottom') + plt.xlabel(xlabel, labelfont) + plt.ylabel(ylabel, labelfont) + plt.tight_layout() + if (args.output_pdf == True): + pp = PdfPages( + 'figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf') + print ( + 'Writing to figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf') + pp.savefig() + pp.close() + plt.show() def measure_analytics(dataset, cls, X, y, sens=None): - y_pred = cls.predict(X) - - error_rate = numpy.mean((y_pred != y)*1.) - print('test error rate: %.3f' % error_rate) + y_pred = cls.predict(X) - discrim0 = discrim(numpy.array(X), cls, numpy.array(sens)) - print('Initial Discrimination: %.3f' % discrim0) + error_rate = numpy.mean((y_pred != y) * 1.) 
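# A minimal, self-contained sketch (hypothetical numbers) of the baseline-offset
# plotting idiom in plot_series_with_baseline above: bars are drawn as deviations
# from a baseline, and the y-axis ticks are relabeled to read as absolute values.
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick

baseline = 0.30  # e.g. the original discrimination level
series = pd.Series({'Age': 0.21, 'Gender': 0.35, 'Race': 0.28})
(series - baseline).plot(kind='bar')            # bar heights are series - baseline
plt.axhline(linestyle='dashed', color='black')  # the baseline sits at y == 0
plt.gca().yaxis.set_major_formatter(
    mtick.FuncFormatter(lambda y, _: '%1.2f' % (y + baseline)))
plt.show()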
+ print('test error rate: %.3f' % error_rate) - from scipy.stats.stats import pearsonr - corr0 = pearsonr(sens, y)[0] - print('Correlation: %.3f' % corr0) + discrim0 = discrim(numpy.array(X), cls, numpy.array(sens)) + print('Initial Discrimination: %.3f' % discrim0) - ji = metrics.jaccard_similarity_score(y, sens) - print('JI: %.3f' % ji) + from scipy.stats.stats import pearsonr + corr0 = pearsonr(sens, y)[0] + print('Correlation: %.3f' % corr0) - mi = metrics.normalized_mutual_info_score(y, sens) - print('MI: %.3f' % mi) + ji = metrics.jaccard_similarity_score(y, sens) + print('JI: %.3f' % ji) + mi = metrics.normalized_mutual_info_score(y, sens) + print('MI: %.3f' % mi) diff --git a/qii.py b/qii.py index c4e5f56..53cb15c 100644 --- a/qii.py +++ b/qii.py @@ -5,123 +5,165 @@ """ import time - +import pdb import pandas as pd import numpy - +import matplotlib.pyplot as plt import numpy.linalg +from matplotlib.backends.backend_pdf import PdfPages from ml_util import split_and_train_classifier, get_arguments, \ - Dataset, measure_analytics, \ - plot_series_with_baseline, plot_series + Dataset, measure_analytics, \ + plot_series_with_baseline, plot_series import qii_lib + def __main__(): - args = get_arguments() - qii_lib.record_counterfactuals = args.record_counterfactuals + args = get_arguments() + qii_lib.record_counterfactuals = args.record_counterfactuals - #Read dataset - dataset = Dataset(args.dataset, sensitive=args.sensitive, target=args.target) - #Get column names - #f_columns = dataset.num_data.columns - #sup_ind = dataset.sup_ind + # Read dataset + dataset = Dataset(args.dataset, sensitive=args.sensitive, target=args.target) + # Get column names + # f_columns = dataset.num_data.columns + # sup_ind = dataset.sup_ind - ######### Begin Training Classifier ########## + ######### Begin Training Classifier ########## - dat = split_and_train_classifier(args, dataset) + dat = split_and_train_classifier(args, dataset) - print 'End Training Classifier' - ######### End Training Classifier ########## + print 'End Training Classifier' + ######### End Training Classifier ########## - measure_analytics(dataset, dat.cls, dat.x_test, dat.y_test, dat.sens_test) + measure_analytics(dataset, dat.cls, dat.x_test, dat.y_test, dat.sens_test) - t_start = time.time() + t_start = time.time() - measures = {'discrim': eval_discrim, - 'average-unary-individual': eval_average_unary_individual, - 'unary-individual': eval_unary_individual, - 'banzhaf': eval_banzhaf, - 'shapley': eval_shapley, - 'average-unary-class' : eval_class_average_unary } + measures = {'discrim': eval_discrim, + 'average-unary-individual': eval_average_unary_individual, + 'unary-individual': eval_unary_individual, + 'banzhaf': eval_banzhaf, + 'shapley': eval_shapley, + 'average-unary-class': eval_class_average_unary} - if args.measure in measures: - measures[args.measure](dataset, args, dat) - else: - raise ValueError("Unknown measure %s" % args.measure) + if args.measure in measures: + measures[args.measure](dataset, args, dat) + else: + raise ValueError("Unknown measure %s" % args.measure) - t_end = time.time() + t_end = time.time() + + print t_end - t_start - print t_end - t_start def eval_discrim(dataset, args, dat): - """ Discrimination metric """ + """ Discrimination metric """ + + baseline = qii_lib.discrim(numpy.array(dat.x_test), dat.cls, numpy.array(dat.sens_test)) + discrim_inf = qii_lib.discrim_influence(dataset, dat.cls, dat.x_test, dat.sens_test) + discrim_inf_series = pd.Series(discrim_inf, index=discrim_inf.keys()) + if 
args.show_plot: + plot_series_with_baseline( + discrim_inf_series, args, + 'Feature', 'QII on Group Disparity', + baseline) - baseline = qii_lib.discrim(numpy.array(dat.x_test), dat.cls, numpy.array(dat.sens_test)) - discrim_inf = qii_lib.discrim_influence(dataset, dat.cls, dat.x_test, dat.sens_test) - discrim_inf_series = pd.Series(discrim_inf, index=discrim_inf.keys()) - if args.show_plot: - plot_series_with_baseline( - discrim_inf_series, args, - 'Feature', 'QII on Group Disparity', - baseline) def eval_average_unary_individual(dataset, args, dat): - """ Unary QII averaged over all individuals. """ + """ Unary QII averaged over all individuals. """ + + average_local_inf, _ = qii_lib.average_local_influence( + dataset, dat.cls, dat.x_test) + average_local_inf_series = pd.Series(average_local_inf, + index=average_local_inf.keys()) + top_40 = average_local_inf_series.sort_values(ascending=False).head(40) + if args.show_plot or args.output_pdf: + plot_series(top_40, args, + 'Feature', 'QII on Outcomes') - average_local_inf, _ = qii_lib.average_local_influence( - dataset, dat.cls, dat.x_test) - average_local_inf_series = pd.Series(average_local_inf, - index=average_local_inf.keys()) - top_40 = average_local_inf_series.sort_values(ascending=False).head(40) - if args.show_plot or args.output_pdf: - plot_series(top_40, args, - 'Feature', 'QII on Outcomes') def eval_unary_individual(dataset, args, dat): - """ Unary QII. """ + """ Unary QII. """ + + x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual].reshape(1, -1)) + average_local_inf, _ = qii_lib.unary_individual_influence( + dataset, dat.cls, x_individual, dat.x_test) + average_local_inf_series = pd.Series( + average_local_inf, index=average_local_inf.keys()) + if args.show_plot or args.output_pdf: + plot_series(average_local_inf_series, args, + 'Feature', 'QII on Outcomes') - x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual].reshape(1, -1)) - average_local_inf, _ = qii_lib.unary_individual_influence( - dataset, dat.cls, x_individual, dat.x_test) - average_local_inf_series = pd.Series( - average_local_inf, index=average_local_inf.keys()) - if args.show_plot or args.output_pdf: - plot_series(average_local_inf_series, args, - 'Feature', 'QII on Outcomes') def eval_banzhaf(dataset, args, dat): - """ Banzhaf metric. """ + """ Banzhaf metric. """ + + x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual]) - x_individual = dat.scaler.transform(dataset.num_data.ix[args.individual]) + banzhaf = qii_lib.banzhaf_influence(dataset, dat.cls, x_individual, dat.x_test) + banzhaf_series = pd.Series(banzhaf, index=banzhaf.keys()) + if args.show_plot or args.output_pdf: + plot_series(banzhaf_series, args, 'Feature', 'QII on Outcomes (Banzhaf)') - banzhaf = qii_lib.banzhaf_influence(dataset, dat.cls, x_individual, dat.x_test) - banzhaf_series = pd.Series(banzhaf, index=banzhaf.keys()) - if args.show_plot or args.output_pdf: - plot_series(banzhaf_series, args, 'Feature', 'QII on Outcomes (Banzhaf)') def eval_shapley(dataset, args, dat): - """ Shapley metric. """ + """ Shapley metric. 
""" - row_individual = dataset.num_data.ix[args.individual].reshape(1, -1) + row_individual = dataset.num_data.ix[args.individual].reshape(1, -1) - x_individual = dat.scaler.transform(row_individual) + x_individual = dat.scaler.transform(row_individual) + + shapley, _ = qii_lib.shapley_influence(dataset, dat.cls, x_individual, dat.x_test) + shapley_series = pd.Series(shapley, index=shapley.keys()) + if args.show_plot or args.output_pdf: + plot_series(shapley_series, args, 'Feature', 'QII on Outcomes (Shapley)') - shapley, _ = qii_lib.shapley_influence(dataset, dat.cls, x_individual, dat.x_test) - shapley_series = pd.Series(shapley, index=shapley.keys()) - if args.show_plot or args.output_pdf: - plot_series(shapley_series, args, 'Feature', 'QII on Outcomes (Shapley)') def eval_class_average_unary(dataset, args, dat): - """ Unary QII averaged over all individuals for a particular class """ - - average_local_inf, _ = qii_lib.average_local_class_influence( - dataset, dat.cls, dat.x_test, dat.x_target_class) - average_local_inf_series = pd.Series(average_local_inf, - index=average_local_inf.keys()) - top_40 = average_local_inf_series.sort_values(ascending=False).head(40) - if args.show_plot or args.output_pdf: - plot_series(top_40, args, - 'Feature', 'QII on Outcomes') + """ Unary QII averaged over all individuals for a particular class """ + average_local_inf, _ = qii_lib.average_local_class_influence( + dataset, dat.cls, dat.x_test, dat.x_target_class) + average_local_inf_series = pd.Series(average_local_inf, + index=average_local_inf.keys()) + top_40 = average_local_inf_series.sort_values(ascending=False).head(40) + if args.show_plot or args.output_pdf: + plot_series(top_40, args, + 'Feature', 'QII on Outcomes') + top_5 = average_local_inf_series.sort_values(ascending=False).head(5) + get_feature_variation_plots(top_5, dataset, args, dat) + + +def get_feature_variation_plots(features_list, dataset, args, dat): + def plot_histogram(dataframe): + data = dataframe.copy() + data = data.drop(['feature', 'class'], axis=1) + data = data.set_index('bin_edges') + data.hist() + del data + + feature_variations = pd.DataFrame() + for cls in dat.y_test.unique(): + x_target_class = dat.x_test[dat.y_test == cls] + feature_variations = feature_variations.append(qii_lib.get_feature_variations(features_list, + dataset, dat.cls, dat.x_test, + x_target_class, cls)) + + figures_count = 1 + for index, group in feature_variations.groupby(['feature']): + plt.figure(figures_count) + for class_index, class_group in group.groupby(['class']): + plt.plot(class_group['bin_edges'], class_group['influences'], label=class_index) + plt.legend(loc='best') + plt.title(index) + if args.output_pdf: + pp = PdfPages('figure-' + index + '-' + args.classifier + '.pdf') + print ('Writing to figure-' + index + '-' + args.classifier + '.pdf') + pp.savefig(bbox_inches='tight') + pp.close() + if args.show_plot: + plt.show() + figures_count += 1 + __main__() diff --git a/qii_lib.py b/qii_lib.py index 8222b1f..3bfa3a7 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -2,272 +2,338 @@ import pandas as pd import numpy +import pdb +from scipy.stats import binned_statistic RECORD_COUNTERFACTUALS = False + def intervene(X, features, x0): - """ Constant intervention """ + """ Constant intervention """ + + X = numpy.array(X, copy=True) + x0 = x0.T + for f in features: + X[:, f] = x0[f] + return X - X = numpy.array(X, copy=True) - x0 = x0.T - for f in features: - X[:, f] = x0[f] - return X def causal_measure(clf, X, ep_state, f, x0): - """ Causal 
Measure with a constant intervention. """
+    """ Causal Measure with a constant intervention. """

-    c0 = clf.predict(x0)
-    X1 = intervene(X, ep_state, x0)
-    p1 = numpy.mean(1.*(clf.predict(X1) == c0))
+    c0 = clf.predict(x0)
+    X1 = intervene(X, ep_state, x0)
+    p1 = numpy.mean(1. * (clf.predict(X1) == c0))

-    X2 = intervene(X, ep_state + [f], x0)
-    p2 = numpy.mean(1.*(clf.predict(X2) == c0))
+    X2 = intervene(X, ep_state + [f], x0)
+    p2 = numpy.mean(1. * (clf.predict(X2) == c0))
+
+    return p2 - p1

-    return p2 - p1

 def random_intervene(X, cols):
-    """ Randomly intervene on a a set of columns of X. """
+    """ Randomly intervene on a set of columns of X. """
+
+    n = X.shape[0]
+    order = numpy.random.permutation(range(n))
+    X_int = numpy.array(X)
+    for c in cols:
+        X_int[:, c] = X_int[order, c]
+    return X_int

-    n = X.shape[0]
-    order = numpy.random.permutation(range(n))
-    X_int = numpy.array(X)
-    for c in cols:
-        X_int[:, c] = X_int[order, c]
-    return X_int

 def random_intervene_class(X, target_class_X, cols):
-    """ Randomly intervene on a a set of columns of target_class_X. """
-    n = X.shape[0]
-    p = target_class_X.shape[0]
-    order = numpy.random.choice(n,p)
-    target_int = numpy.array(target_class_X)
-    for c in cols:
-        target_int[:, c] = X[order, c]
-    return target_int
+    """ Randomly intervene on a set of columns of target_class_X. """
+    n = X.shape[0]
+    p = target_class_X.shape[0]
+    order = numpy.random.choice(n, p)
+    target_int = numpy.array(target_class_X)
+    for c in cols:
+        target_int[:, c] = X[order, c]
+    return target_int
+
+
+def get_histogram_bins(X, cols, num_bins=40):
+    col = cols[0]
+    column_values = X[:, col]
+    _, bin_edges, binned_indices = binned_statistic(column_values, numpy.ones(len(column_values)), statistic='sum',
+                                                    bins=num_bins)
+    return binned_indices, bin_edges
+
+
+def yield_increasing_bins(X, target_class_X, binned_indices, cols, bin):
+    """ Intervene on a set of columns of target_class_X, drawing replacement
+    values only from the rows of X that fall in the given histogram bin. """
+    p = target_class_X.shape[0]
+    target_int = numpy.array(target_class_X)
+    # binned_statistic numbers bins from 1, while callers count from 0
+    indices_list = [index for index, value in enumerate(binned_indices) if value == bin + 1]
+    n = len(indices_list)
+    if n == 0:
+        return None
+    else:
+        # sample actual row indices of X that lie in this bin
+        order = numpy.random.choice(indices_list, p)
+        for c in cols:
+            target_int[:, c] = X[order, c]
+        return target_int

 def random_intervene_point(X, cols, x0):
-    """ Randomly intervene on a a set of columns of x from X. """
-    n = X.shape[0]
-    order = numpy.random.permutation(range(n))
-    X_int = numpy.tile(x0, (n, 1))
-    for c in cols:
-        X_int[:, c] = X[order, c]
-    return X_int
+    """ Randomly intervene on a set of columns of x from X. 
""" + n = X.shape[0] + order = numpy.random.permutation(range(n)) + X_int = numpy.tile(x0, (n, 1)) + for c in cols: + X_int[:, c] = X[order, c] + return X_int + def discrim(X, cls, sens): - not_sens = 1 - sens - y_pred = cls.predict(X) - discrim = numpy.abs(numpy.dot(y_pred, not_sens)/sum(not_sens) - - numpy.dot(y_pred, sens)/sum(sens)) - return discrim + not_sens = 1 - sens + y_pred = cls.predict(X) + discrim = numpy.abs(numpy.dot(y_pred, not_sens) / sum(not_sens) + - numpy.dot(y_pred, sens) / sum(sens)) + return discrim + def discrim_ratio(X, cls, sens): - not_sens = 1 - sens - y_pred = cls.predict(X) - sens_rate = numpy.dot(y_pred, sens)/sum(sens) - not_sens_rate = numpy.dot(y_pred, not_sens)/sum(not_sens) + not_sens = 1 - sens + y_pred = cls.predict(X) + sens_rate = numpy.dot(y_pred, sens) / sum(sens) + not_sens_rate = numpy.dot(y_pred, not_sens) / sum(not_sens) + + discrim = not_sens_rate / sens_rate + return discrim - discrim = not_sens_rate/sens_rate - return discrim def discrim_influence(dataset, cls, X_test, sens_test): - """ Measure influence on discrimination. """ - - discrim_inf = {} - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - for sf in sup_ind: - ls = [f_columns.get_loc(f) for f in sup_ind[sf]] - X_inter = random_intervene(numpy.array(X_test), ls) - discrim_inter = discrim(X_inter, cls, numpy.array(sens_test)) - discrim_inf[sf] = discrim_inter - print 'Discrimination %s: %.3f' % (sf, discrim_inf[sf]) - return discrim_inf + """ Measure influence on discrimination. """ + + discrim_inf = {} + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + for sf in sup_ind: + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + X_inter = random_intervene(numpy.array(X_test), ls) + discrim_inter = discrim(X_inter, cls, numpy.array(sens_test)) + discrim_inf[sf] = discrim_inter + print 'Discrimination %s: %.3f' % (sf, discrim_inf[sf]) + return discrim_inf + def average_local_influence(dataset, cls, X): - average_local_inf = {} - counterfactuals = {} - iters = 10 - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - y_pred = cls.predict(X) - for sf in sup_ind: - local_influence = numpy.zeros(y_pred.shape[0]) - if RECORD_COUNTERFACTUALS: - counterfactuals[sf] = (numpy.tile(X, (iters, 1)), numpy.tile(X, (iters, 1))) - ls = [f_columns.get_loc(f) for f in sup_ind[sf]] - for i in xrange(0, iters): - X_inter = random_intervene(numpy.array(X), ls) - y_pred_inter = cls.predict(X_inter) - local_influence = local_influence + (y_pred == y_pred_inter)*1. - if RECORD_COUNTERFACTUALS: - n = X_inter.shape[0] - counterfactuals[sf][1][i*n:(i+1)*n] = X_inter - - average_local_inf[sf] = 1 - (local_influence/iters).mean() - #print('Influence %s: %.3f' % (sf, average_local_inf[sf])) - return (average_local_inf, counterfactuals) + average_local_inf = {} + counterfactuals = {} + iters = 10 + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + y_pred = cls.predict(X) + for sf in sup_ind: + local_influence = numpy.zeros(y_pred.shape[0]) + if RECORD_COUNTERFACTUALS: + counterfactuals[sf] = (numpy.tile(X, (iters, 1)), numpy.tile(X, (iters, 1))) + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + for i in xrange(0, iters): + X_inter = random_intervene(numpy.array(X), ls) + y_pred_inter = cls.predict(X_inter) + local_influence = local_influence + (y_pred == y_pred_inter) * 1. 
+ if RECORD_COUNTERFACTUALS: + n = X_inter.shape[0] + counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter + + average_local_inf[sf] = 1 - (local_influence / iters).mean() + # print('Influence %s: %.3f' % (sf, average_local_inf[sf])) + return (average_local_inf, counterfactuals) + def average_local_class_influence(dataset, cls, X, target_class_X): - average_local_inf_class = {} - counterfactuals = {} - iters = 10 - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - y_pred = cls.predict(target_class_X) - for sf in sup_ind: - local_influence = numpy.zeros(y_pred.shape[0]) - if RECORD_COUNTERFACTUALS: - counterfactuals[sf] = (numpy.tile(target_class_X, (iters, 1)), numpy.tile(target_class_X, (iters, 1))) - ls = [f_columns.get_loc(f) for f in sup_ind[sf]] - for i in xrange(0, iters): - X_inter = random_intervene_class(numpy.array(X), numpy.array(target_class_X), ls) - y_pred_inter = cls.predict(X_inter) - local_influence = local_influence + (y_pred == y_pred_inter)*1. - if RECORD_COUNTERFACTUALS: - n = X_inter.shape[0] - counterfactuals[sf][1][i*n:(i+1)*n] = X_inter - - average_local_inf_class[sf] = 1 - (local_influence/iters).mean() - #print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) - return (average_local_inf_class, counterfactuals) + average_local_inf_class = {} + counterfactuals = {} + iters = 10 + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + y_pred = cls.predict(target_class_X) + for sf in sup_ind: + local_influence = numpy.zeros(y_pred.shape[0]) + if RECORD_COUNTERFACTUALS: + counterfactuals[sf] = (numpy.tile(target_class_X, (iters, 1)), numpy.tile(target_class_X, (iters, 1))) + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + for i in xrange(0, iters): + X_inter = random_intervene_class(numpy.array(X), numpy.array(target_class_X), ls) + y_pred_inter = cls.predict(X_inter) + local_influence = local_influence + (y_pred == y_pred_inter) * 1. + if RECORD_COUNTERFACTUALS: + n = X_inter.shape[0] + counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter + + average_local_inf_class[sf] = 1 - (local_influence / iters).mean() + # print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) + return (average_local_inf_class, counterfactuals) + + +def get_feature_variations(features_list, dataset, cls, X, target_class_X, class_name): + average_local_inf_class = pd.DataFrame() + bins = 40 + iters = 10 + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + y_pred = cls.predict(target_class_X) + indices = features_list.reset_index()['index'] + for sf in indices: + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + binned_indices, bin_edges = get_histogram_bins(numpy.array(X),ls,num_bins=bins) + feature_dataframe = pd.DataFrame({'bin_edges' : bin_edges[0:-1]}) + feature_dataframe['class'] = class_name + feature_dataframe['feature'] = sf + influences = [] + for bin in xrange(0, bins): + local_influence = numpy.zeros(y_pred.shape[0]) + for iter in xrange(0, iters): + X_inter = yield_increasing_bins(numpy.array(X), numpy.array(target_class_X),binned_indices, ls, bin) + if X_inter is not None: + y_pred_inter = cls.predict(X_inter) + local_influence = local_influence + (y_pred == y_pred_inter) * 1. 
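# get_feature_variations leans on scipy.stats.binned_statistic to bucket one
# column; a stand-alone sketch of just that step (toy values). Note that the
# returned bin numbers are 1-based.
import numpy
from scipy.stats import binned_statistic

values = numpy.random.rand(500)
counts, bin_edges, bin_ids = binned_statistic(
    values, numpy.ones(len(values)), statistic='sum', bins=40)
# bin_ids[i] is the 1-based bin of values[i]; counts[k] is the size of bin k + 1
rows_in_bin_3 = [i for i, b in enumerate(bin_ids) if b == 3]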
+ influences.append((local_influence / iters).mean()) + feature_dataframe['influences'] = influences + average_local_inf_class = average_local_inf_class.append(feature_dataframe) + # print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) + return average_local_inf_class def unary_individual_influence(dataset, cls, x_ind, X): - y_pred = cls.predict(x_ind.reshape(1, -1)) - average_local_inf = {} - counterfactuals = {} - iters = 1 - f_columns = dataset.num_data.columns - sup_ind = dataset.sup_ind - for sf in sup_ind: - local_influence = numpy.zeros(y_pred.shape[0]) - if RECORD_COUNTERFACTUALS: - counterfactuals[sf] = (numpy.tile(X, (iters, 1)), numpy.tile(X, (iters, 1))) - ls = [f_columns.get_loc(f) for f in sup_ind[sf]] - for i in xrange(0, iters): - X_inter = random_intervene_point(numpy.array(X), ls, x_ind) - y_pred_inter = cls.predict(X_inter) - local_influence = local_influence + (y_pred == y_pred_inter)*1. - if RECORD_COUNTERFACTUALS: - n = X_inter.shape[0] - counterfactuals[sf][1][i*n:(i+1)*n] = X_inter - - average_local_inf[sf] = 1 - (local_influence/iters).mean() - #print('Influence %s: %.3f' % (sf, average_local_inf[sf])) - return (average_local_inf, counterfactuals) + y_pred = cls.predict(x_ind.reshape(1, -1)) + average_local_inf = {} + counterfactuals = {} + iters = 1 + f_columns = dataset.num_data.columns + sup_ind = dataset.sup_ind + for sf in sup_ind: + local_influence = numpy.zeros(y_pred.shape[0]) + if RECORD_COUNTERFACTUALS: + counterfactuals[sf] = (numpy.tile(X, (iters, 1)), numpy.tile(X, (iters, 1))) + ls = [f_columns.get_loc(f) for f in sup_ind[sf]] + for i in xrange(0, iters): + X_inter = random_intervene_point(numpy.array(X), ls, x_ind) + y_pred_inter = cls.predict(X_inter) + local_influence = local_influence + (y_pred == y_pred_inter) * 1. 
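# The point intervention used above, in isolation (toy shapes): hold the
# individual fixed and swap a single column with random draws from the sample.
import numpy

X = numpy.random.rand(100, 3)       # population sample
x0 = numpy.array([0.9, 0.1, 0.5])   # individual under study
X_int = numpy.tile(x0, (X.shape[0], 1))                    # copies of x0
X_int[:, 1] = X[numpy.random.permutation(X.shape[0]), 1]   # intervene on column 1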
+        if RECORD_COUNTERFACTUALS:
+            n = X_inter.shape[0]
+            counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter
+
+        average_local_inf[sf] = 1 - (local_influence / iters).mean()
+        # print('Influence %s: %.3f' % (sf, average_local_inf[sf]))
+    return (average_local_inf, counterfactuals)
+

 def shapley_influence(dataset, cls, x_individual, X_test):
-    p_samples = 600
-    s_samples = 600
-
-    def v(S, x, X_inter):
-        x_rep = numpy.tile(x, (p_samples, 1))
-        for f in S:
-            x_rep[:, f] = X_inter[:, f]
-        p = ((cls.predict(x_rep) == y0)*1.).mean()
-        return (p, x_rep)
-
-    #min_i = numpy.argmin(sum_local_influence)
-    y0 = cls.predict(x_individual)
-    print y0
-    b = numpy.random.randint(0, X_test.shape[0], p_samples)
-    X_sample = numpy.array(X_test.ix[b])
-    f_columns = dataset.num_data.columns
-    sup_ind = dataset.sup_ind
-    super_indices = dataset.sup_ind.keys()
-
-    shapley = dict.fromkeys(super_indices, 0)
-    if RECORD_COUNTERFACTUALS:
-        base = numpy.tile(x_individual, (2*p_samples*s_samples, 1))
-        #counterfactuals = dict([(sf, (base, numpy.zeros(p_samples*s_samples*2, X_test.shape[1])))
-        #                        for sf in dataset.sup_ind.keys()])
-
-        counterfactuals = dict([(sf, (base,
-                                      numpy.zeros((p_samples*s_samples*2, X_test.shape[1]))))
-                                for sf in dataset.sup_ind.keys()])
-    else:
-        counterfactuals = {}
-
-    for sample in xrange(0, s_samples):
-        perm = numpy.random.permutation(len(super_indices))
-        for i in xrange(0, len(super_indices)):
-            # Choose a random subset and get string indices by flattening
-            # excluding si
-            si = super_indices[perm[i]]
-            S_m_si = sum([sup_ind[super_indices[perm[j]]] for j in xrange(0, i)], [])
-            #translate into intiger indices
-            ls_m_si = [f_columns.get_loc(f) for f in S_m_si]
-            #repeat x_individual_rep
-            (p_S, X_S) = v(ls_m_si, x_individual, X_sample)
-            #also intervene on s_i
-            ls_si = [f_columns.get_loc(f) for f in sup_ind[si]]
-            (p_S_si, X_S_si) = v(ls_m_si + ls_si, x_individual, X_sample)
-            shapley[si] = shapley[si] - (p_S_si - p_S)/s_samples
-
-            if RECORD_COUNTERFACTUALS:
-                start_ind = 2*sample*p_samples
-                mid_ind = (2*sample+1)*p_samples
-                end_ind = 2*(sample+1)*p_samples
-                counterfactuals[si][1][start_ind:mid_ind] = X_S
-                counterfactuals[si][1][mid_ind:end_ind] = X_S_si
-
-    return (shapley, counterfactuals)
+    p_samples = 600
+    s_samples = 600
+
+    def v(S, x, X_inter):
+        x_rep = numpy.tile(x, (p_samples, 1))
+        for f in S:
+            x_rep[:, f] = X_inter[:, f]
+        p = ((cls.predict(x_rep) == y0) * 1.).mean()
+        return (p, x_rep)
+
+    # min_i = numpy.argmin(sum_local_influence)
+    y0 = cls.predict(x_individual)
+    print y0
+    b = numpy.random.randint(0, X_test.shape[0], p_samples)
+    X_sample = numpy.array(X_test.ix[b])
+    f_columns = dataset.num_data.columns
+    sup_ind = dataset.sup_ind
+    super_indices = dataset.sup_ind.keys()
+
+    shapley = dict.fromkeys(super_indices, 0)
+    if RECORD_COUNTERFACTUALS:
+        base = numpy.tile(x_individual, (2 * p_samples * s_samples, 1))
+        # counterfactuals = dict([(sf, (base, numpy.zeros(p_samples*s_samples*2, X_test.shape[1])))
+        #                         for sf in dataset.sup_ind.keys()])

+        counterfactuals = dict([(sf, (base,
+                                      numpy.zeros((p_samples * s_samples * 2, X_test.shape[1]))))
+                                for sf in dataset.sup_ind.keys()])
+    else:
+        counterfactuals = {}
+
+    for sample in xrange(0, s_samples):
+        perm = numpy.random.permutation(len(super_indices))
+        for i in xrange(0, len(super_indices)):
+            # Choose a random subset and get string indices by flattening
+            # excluding si
+            si = super_indices[perm[i]]
+            S_m_si = sum([sup_ind[super_indices[perm[j]]] for j in xrange(0, i)], [])
+            # translate into integer indices
+            ls_m_si = [f_columns.get_loc(f) for f in S_m_si]
+            # repeat x_individual_rep
+            (p_S, X_S) = v(ls_m_si, x_individual, X_sample)
+            # also intervene on s_i
+            ls_si = [f_columns.get_loc(f) for f in sup_ind[si]]
+            (p_S_si, X_S_si) = v(ls_m_si + ls_si, x_individual, X_sample)
+            shapley[si] = shapley[si] - (p_S_si - p_S) / s_samples
+
+            if RECORD_COUNTERFACTUALS:
+                start_ind = 2 * sample * p_samples
+                mid_ind = (2 * sample + 1) * p_samples
+                end_ind = 2 * (sample + 1) * p_samples
+                counterfactuals[si][1][start_ind:mid_ind] = X_S
+                counterfactuals[si][1][mid_ind:end_ind] = X_S_si
+
+    return (shapley, counterfactuals)
+

 def banzhaf_influence(dataset, cls, x_individual, X_test):
-    p_samples = 600
-    s_samples = 600
-
-    def v(S, x, X_inter):
-        x_rep = numpy.tile(x, (p_samples, 1))
-        for f in S:
-            x_rep[:, f] = X_inter[:, f]
-        p = ((cls.predict(x_rep) == y0)*1.).mean()
-        return p
-
-    #min_i = numpy.argmin(sum_local_influence)
-    y0 = cls.predict(x_individual)
-    b = numpy.random.randint(0, X_test.shape[0], p_samples)
-    X_sample = numpy.array(X_test.ix[b])
-    f_columns = dataset.num_data.columns
-    sup_ind = dataset.sup_ind
-    super_indices = dataset.sup_ind.keys()
-
-    banzhaf = dict.fromkeys(super_indices, 0)
-
-    for sample in xrange(0, s_samples):
-        r = numpy.random.ranf(len(super_indices))
-        S = [super_indices[i] for i in xrange(0, len(super_indices)) if r[i] > 0.5]
-        for si in super_indices:
-            # Choose a random subset and get string indices by flattening
-            # excluding si
-            S_m_si = sum([sup_ind[x] for x in S if x != si], [])
-            #translate into intiger indices
-            ls_m_si = [f_columns.get_loc(f) for f in S_m_si]
-            #repeat x_individual_rep
-            p_S = v(ls_m_si, x_individual, X_sample)
-            #also intervene on s_i
-            ls_si = [f_columns.get_loc(f) for f in sup_ind[si]]
-            p_S_si = v(ls_m_si + ls_si, x_individual, X_sample)
-            banzhaf[si] = banzhaf[si] - (p_S - p_S_si)/s_samples
-    return banzhaf
+    p_samples = 600
+    s_samples = 600
+
+    def v(S, x, X_inter):
+        x_rep = numpy.tile(x, (p_samples, 1))
+        for f in S:
+            x_rep[:, f] = X_inter[:, f]
+        p = ((cls.predict(x_rep) == y0) * 1.).mean()
+        return p
+
+    # min_i = numpy.argmin(sum_local_influence)
+    y0 = cls.predict(x_individual)
+    b = numpy.random.randint(0, X_test.shape[0], p_samples)
+    X_sample = numpy.array(X_test.ix[b])
+    f_columns = dataset.num_data.columns
+    sup_ind = dataset.sup_ind
+    super_indices = dataset.sup_ind.keys()
+
+    banzhaf = dict.fromkeys(super_indices, 0)
+
+    for sample in xrange(0, s_samples):
+        r = numpy.random.ranf(len(super_indices))
+        S = [super_indices[i] for i in xrange(0, len(super_indices)) if r[i] > 0.5]
+        for si in super_indices:
+            # Choose a random subset and get string indices by flattening
+            # excluding si
+            S_m_si = sum([sup_ind[x] for x in S if x != si], [])
+            # translate into integer indices
+            ls_m_si = [f_columns.get_loc(f) for f in S_m_si]
+            # repeat x_individual_rep
+            p_S = v(ls_m_si, x_individual, X_sample)
+            # also intervene on s_i
+            ls_si = [f_columns.get_loc(f) for f in sup_ind[si]]
+            p_S_si = v(ls_m_si + ls_si, x_individual, X_sample)
+            banzhaf[si] = banzhaf[si] - (p_S - p_S_si) / s_samples
+    return banzhaf
+

 def analyze_outliers(counterfactuals, out_cls, cls):
-    outlier_fracs = {}
-    new_outlier_fracs = {}
-    qii = {}
-    for sf, pairs in counterfactuals.iteritems():
-        X = pairs[0]
-        X_cf = pairs[1]
-        outs_X = out_cls.predict(X) == -1
-        outs_X_cf = out_cls.predict(X_cf) == -1
-        outlier_fracs[sf] = numpy.mean(outs_X_cf)
-        lnot = numpy.logical_not
-        land = numpy.logical_and
-        old_outlier_frac = numpy.mean(lnot(outs_X))
-
new_outlier_fracs[sf] = numpy.mean(land(lnot(outs_X), outs_X_cf))/old_outlier_frac - qii = numpy.mean(cls.predict(X) != cls.predict(X_cf)) - print 'QII %s %.3f' % (sf, qii) - return (outlier_fracs, new_outlier_fracs) + outlier_fracs = {} + new_outlier_fracs = {} + qii = {} + for sf, pairs in counterfactuals.iteritems(): + X = pairs[0] + X_cf = pairs[1] + outs_X = out_cls.predict(X) == -1 + outs_X_cf = out_cls.predict(X_cf) == -1 + outlier_fracs[sf] = numpy.mean(outs_X_cf) + lnot = numpy.logical_not + land = numpy.logical_and + old_outlier_frac = numpy.mean(lnot(outs_X)) + new_outlier_fracs[sf] = numpy.mean(land(lnot(outs_X), outs_X_cf)) / old_outlier_frac + qii = numpy.mean(cls.predict(X) != cls.predict(X_cf)) + print 'QII %s %.3f' % (sf, qii) + return (outlier_fracs, new_outlier_fracs) From 60dc5bbae873c1663fbb962c2f7f7be60342a194 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Thu, 23 Nov 2017 15:46:30 -0800 Subject: [PATCH 22/31] issue with normalisation fix --- ml_util.py | 2 +- qii.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/ml_util.py b/ml_util.py index ed357fb..99d59aa 100644 --- a/ml_util.py +++ b/ml_util.py @@ -318,7 +318,7 @@ def split_and_train_classifier(args, dataset, scaler=None, normalize=False): ## Split data into training and test data x_train, x_test, y_train, y_test = cross_validation.train_test_split( dataset.num_data, dataset.target, - train_size=0.40, + train_size=0.40, random_state=100 ) x_target_class = None diff --git a/qii.py b/qii.py index 53cb15c..9b177e8 100644 --- a/qii.py +++ b/qii.py @@ -144,12 +144,14 @@ def plot_histogram(dataframe): feature_variations = pd.DataFrame() for cls in dat.y_test.unique(): - x_target_class = dat.x_test[dat.y_test == cls] + x_test = dat.x_test.reset_index(drop=True) + y_test = dat.y_test.reset_index(drop=True) + x_target_class = x_test[y_test == cls] feature_variations = feature_variations.append(qii_lib.get_feature_variations(features_list, dataset, dat.cls, dat.x_test, x_target_class, cls)) - figures_count = 1 + figures_count = 10 for index, group in feature_variations.groupby(['feature']): plt.figure(figures_count) for class_index, class_group in group.groupby(['class']): From e9aefc8c5d7303315c1b9d51251dc8561996d81a Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Fri, 24 Nov 2017 13:31:59 -0800 Subject: [PATCH 23/31] changing the output name --- ml_util.py | 5 +++-- qii.py | 2 +- qii_lib.py | 6 +++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/ml_util.py b/ml_util.py index 99d59aa..37896e1 100644 --- a/ml_util.py +++ b/ml_util.py @@ -390,8 +390,9 @@ def plot_series(series, args, xlabel, ylabel): plt.ylabel(ylabel, labelfont) plt.tight_layout() if (args.output_pdf == True): - pp = PdfPages('figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + '.pdf') - print ('Writing to figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + '.pdf') + class_value = str(args.class_influence) if args.class_influence is not None else '' + pp = PdfPages('figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + class_value +'.pdf') + print ('Writing to figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + class_value + '.pdf') pp.savefig(bbox_inches='tight') pp.close() if (args.show_plot == True): diff --git a/qii.py b/qii.py index 9b177e8..6fd40fb 100644 --- a/qii.py +++ b/qii.py @@ -36,7 +36,7 @@ def __main__(): print 'End Training Classifier' ######### End Training Classifier ########## - measure_analytics(dataset, dat.cls, dat.x_test, 
dat.y_test, dat.sens_test) + # measure_analytics(dataset, dat.cls, dat.x_test, dat.y_test, dat.sens_test) t_start = time.time() diff --git a/qii_lib.py b/qii_lib.py index 3bfa3a7..21f8432 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -140,7 +140,7 @@ def average_local_influence(dataset, cls, X): counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter average_local_inf[sf] = 1 - (local_influence / iters).mean() - # print('Influence %s: %.3f' % (sf, average_local_inf[sf])) + print('Influence %s: %.3f' % (sf, average_local_inf[sf])) return (average_local_inf, counterfactuals) @@ -165,7 +165,7 @@ def average_local_class_influence(dataset, cls, X, target_class_X): counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter average_local_inf_class[sf] = 1 - (local_influence / iters).mean() - # print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) + print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) return (average_local_inf_class, counterfactuals) @@ -194,7 +194,7 @@ def get_feature_variations(features_list, dataset, cls, X, target_class_X, class influences.append((local_influence / iters).mean()) feature_dataframe['influences'] = influences average_local_inf_class = average_local_inf_class.append(feature_dataframe) - # print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) + print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) return average_local_inf_class From b47def87d7098dcc40b914e077ffcc18011c1949 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Fri, 24 Nov 2017 13:42:52 -0800 Subject: [PATCH 24/31] changing debugs and file save names --- ml_util.py | 4 +++- qii_lib.py | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/ml_util.py b/ml_util.py index 37896e1..b593ab9 100644 --- a/ml_util.py +++ b/ml_util.py @@ -381,7 +381,9 @@ def train_classifier(args, X_train, y_train): def plot_series(series, args, xlabel, ylabel): - plt.figure(figsize=(5, 4)) + # plt.figure(figsize=(5, 4)) + plt.ioff() + plt.figure(figsize=(10, 10)) series.sort_values(inplace=True, ascending=False) # average_local_inf_series.plot(kind="bar", facecolor='#ff9999', edgecolor='white') series.plot(kind="bar") diff --git a/qii_lib.py b/qii_lib.py index 21f8432..b09b156 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -194,7 +194,7 @@ def get_feature_variations(features_list, dataset, cls, X, target_class_X, class influences.append((local_influence / iters).mean()) feature_dataframe['influences'] = influences average_local_inf_class = average_local_inf_class.append(feature_dataframe) - print('Influence %s: %.3f' % (sf, average_local_inf_class[sf])) + print('Influence %s is done' % (sf)) return average_local_inf_class @@ -219,7 +219,7 @@ def unary_individual_influence(dataset, cls, x_ind, X): counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter average_local_inf[sf] = 1 - (local_influence / iters).mean() - # print('Influence %s: %.3f' % (sf, average_local_inf[sf])) + # print('Influence %s: %.3f' % (sf, average_local_inf[sf])) return (average_local_inf, counterfactuals) From 9b13c1d92f518875cb744fe781fd693e043c9578 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Fri, 24 Nov 2017 13:46:31 -0800 Subject: [PATCH 25/31] fixing the random state --- ml_util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ml_util.py b/ml_util.py index b593ab9..a11475e 100644 --- a/ml_util.py +++ b/ml_util.py @@ -373,6 +373,7 @@ def train_classifier(args, X_train, y_train): cls = GradientBoostingClassifier(n_estimators=args.n_estimators, learning_rate=1.0, max_depth=args.max_depth, + random_state=100 ) # Train 
sklearn model From b363ba4e5c2c77f706a47ab969251e4050156690 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Tue, 28 Nov 2017 18:51:11 -0800 Subject: [PATCH 26/31] restoring to default --- ml_util.py | 2 +- qii.py | 9 +++------ qii_lib.py | 1 - 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/ml_util.py b/ml_util.py index a11475e..87e7969 100644 --- a/ml_util.py +++ b/ml_util.py @@ -313,7 +313,7 @@ def __init__(self, cls, x_test, y_test, x_target_class, sens_test, **kw): argparse.Namespace.__init__(self, **kw) -def split_and_train_classifier(args, dataset, scaler=None, normalize=False): +def split_and_train_classifier(args, dataset, scaler=None, normalize=True): classifier = args.classifier ## Split data into training and test data x_train, x_test, y_train, y_test = cross_validation.train_test_split( diff --git a/qii.py b/qii.py index 6fd40fb..a9df240 100644 --- a/qii.py +++ b/qii.py @@ -5,8 +5,7 @@ """ import time -import pdb -import pandas as pd +import pandas as pd import numpy import matplotlib.pyplot as plt import numpy.linalg @@ -36,7 +35,7 @@ def __main__(): print 'End Training Classifier' ######### End Training Classifier ########## - # measure_analytics(dataset, dat.cls, dat.x_test, dat.y_test, dat.sens_test) + measure_analytics(dataset, dat.cls, dat.x_test, dat.y_test, dat.sens_test) t_start = time.time() @@ -151,9 +150,8 @@ def plot_histogram(dataframe): dataset, dat.cls, dat.x_test, x_target_class, cls)) - figures_count = 10 for index, group in feature_variations.groupby(['feature']): - plt.figure(figures_count) + plt.figure() for class_index, class_group in group.groupby(['class']): plt.plot(class_group['bin_edges'], class_group['influences'], label=class_index) plt.legend(loc='best') @@ -165,7 +163,6 @@ def plot_histogram(dataframe): pp.close() if args.show_plot: plt.show() - figures_count += 1 __main__() diff --git a/qii_lib.py b/qii_lib.py index b09b156..3a7ccce 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -2,7 +2,6 @@ import pandas as pd import numpy -import pdb from scipy.stats import binned_statistic RECORD_COUNTERFACTUALS = False From dc34199f48b99571b84b6c69e4a6500fcd0f30e7 Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Tue, 28 Nov 2017 18:55:33 -0800 Subject: [PATCH 27/31] restoring to default --- ml_util.py | 1 + qii.py | 1 + qii_lib.py | 4 ++-- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ml_util.py b/ml_util.py index 87e7969..72226c3 100644 --- a/ml_util.py +++ b/ml_util.py @@ -315,6 +315,7 @@ def __init__(self, cls, x_test, y_test, x_target_class, sens_test, **kw): def split_and_train_classifier(args, dataset, scaler=None, normalize=True): classifier = args.classifier + ## Split data into training and test data x_train, x_test, y_train, y_test = cross_validation.train_test_split( dataset.num_data, dataset.target, diff --git a/qii.py b/qii.py index a9df240..56e30f2 100644 --- a/qii.py +++ b/qii.py @@ -134,6 +134,7 @@ def eval_class_average_unary(dataset, args, dat): def get_feature_variation_plots(features_list, dataset, args, dat): + def plot_histogram(dataframe): data = dataframe.copy() data = data.drop(['feature', 'class'], axis=1) diff --git a/qii_lib.py b/qii_lib.py index 3a7ccce..ea6b7ec 100644 --- a/qii_lib.py +++ b/qii_lib.py @@ -133,7 +133,7 @@ def average_local_influence(dataset, cls, X): for i in xrange(0, iters): X_inter = random_intervene(numpy.array(X), ls) y_pred_inter = cls.predict(X_inter) - local_influence = local_influence + (y_pred == y_pred_inter) * 1. + local_influence += (y_pred == y_pred_inter) * 1. 
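# The `+=` rewrite above is an in-place numpy update; it is equivalent here
# because local_influence is a fresh array on each pass. A two-line contrast:
import numpy

a = numpy.zeros(3)
alias = a
alias += 1   # in-place: both names now see array([ 1.,  1.,  1.])
a = a + 1    # rebinding: allocates a new array; alias is unchanged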
if RECORD_COUNTERFACTUALS: n = X_inter.shape[0] counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter @@ -158,7 +158,7 @@ def average_local_class_influence(dataset, cls, X, target_class_X): for i in xrange(0, iters): X_inter = random_intervene_class(numpy.array(X), numpy.array(target_class_X), ls) y_pred_inter = cls.predict(X_inter) - local_influence = local_influence + (y_pred == y_pred_inter) * 1. + local_influence += (y_pred == y_pred_inter) * 1. if RECORD_COUNTERFACTUALS: n = X_inter.shape[0] counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter From b75eaa55368378bc64d521b3adee699e348637ab Mon Sep 17 00:00:00 2001 From: Rajkiran Date: Tue, 28 Nov 2017 19:12:11 -0800 Subject: [PATCH 28/31] trying to fix formatting --- ml_util.py | 730 +++++++++++++++++++++++++++-------------------------- qii_lib.py | 10 +- 2 files changed, 375 insertions(+), 365 deletions(-) diff --git a/ml_util.py b/ml_util.py index 46aa388..6d6a1ca 100644 --- a/ml_util.py +++ b/ml_util.py @@ -23,28 +23,33 @@ from qii_lib import * - -#labelfont = {'fontname':'Times New Roman', 'size':15} +# labelfont = {'fontname':'Times New Roman', 'size':15} labelfont = {} -#hfont = {'fontname':'Helvetica'} + + +# hfont = {'fontname':'Helvetica'} def get_column_index(data, cname): - try: - idx = data.columns.get_loc(cname) - except Exception as e: - raise ValueError("Unknown column %s" % cname) - - return idx + try: + idx = data.columns.get_loc(cname) + except Exception as e: + raise ValueError("Unknown column %s" % cname) + + return idx + def encode_nominal(col): - if col.dtype == object: - return LabelEncoder().fit_transform(col) - else: - return col + if col.dtype == object: + return LabelEncoder().fit_transform(col) + else: + return col + import argparse + + class Dataset(object): - """ + """ Class that holds a dataset. Each dataset has its own quirks and needs some special processing to get to the point where we need it to. 
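# encode_nominal above label-encodes only object-dtype columns; a quick
# stand-alone check of that behaviour (toy column):
import pandas as pd
from sklearn.preprocessing import LabelEncoder

col = pd.Series(['red', 'blue', 'red'])
print(LabelEncoder().fit_transform(col))  # classes sort to blue=0, red=1 -> [1 0 1]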
@@ -70,375 +75,380 @@ class Dataset(object): or the sensitive column from a dataset """ - def __init__( self, dataset, sensitive=None, target=None): - self.name = dataset - - # Warfarin dosage dataset - if (dataset == 'iwpc'): - self.num_data = pd.DataFrame.from_records( - arff.load('data/iwpc/iwpc_train_class.arff'), - columns=[ - 'index', 'race=black', 'race=asian', 'age', 'height', 'weight', 'amiodarone', - 'cyp2c9=13', 'cyp2c9=12', 'cyp2c9=23', 'cyp2c9=33', 'cyp2c9=22', - 'vkorc1=CT', 'vkorc1=TT', 'decr', 'dose' - ]) - self.sup_ind = {} - self.sup_ind['race'] = ['race=black','race=asian'] - self.sup_ind['age'] = ['age'] - self.sup_ind['height'] = ['height'] - self.sup_ind['weight'] = ['weight'] - self.sup_ind['amiodarone'] = ['amiodarone'] - self.sup_ind['cyp2c9'] = ['cyp2c9=13','cyp2c9=12','cyp2c9=23','cyp2c9=33','cyp2c9=22'] - self.sup_ind['vkorc1'] = ['vkorc1=CT','vkorc1=TT'] - self.sup_ind['decr'] = ['decr'] - self.sup_ind['dose'] = ['dose'] - self.target_ix = 'dose' - self.sensitive_ix = 'race=black' - if sensitive is None: - self.get_sensitive = (lambda X: X['race=black']) - - self.target = self.num_data['dose'] - self.num_data = self.num_data.drop(['index'], axis = 1) - self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis = 1) - del self.sup_ind['dose'] - - - #Adult dataset - elif (dataset == 'adult'): - self.original_data = pd.read_csv( - "data/adult/adult.data", - names=[ - "Age", "Workclass", "fnlwgt", "Education", "Education-Num", "Marital Status", - "Occupation", "Relationship", "Race", "Gender", "Capital Gain", "Capital Loss", - "Hours per week", "Country", "Target"], - sep=r'\s*,\s*', - engine='python', - na_values="?") - del self.original_data['fnlwgt'] - self.sup_ind = make_super_indices(self.original_data) - self.num_data = pd.get_dummies(self.original_data) - self.target_ix = 'Target' - self.sensitive_ix = sensitive - - #Define and dedup Target - self.target = self.num_data['Target_>50K'] - self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis = 1) - del self.sup_ind['Target'] - - #Dedup Gender - self.num_data['Gender'] = self.num_data['Gender_Male'] - self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis = 1) - self.sup_ind['Gender'] = ['Gender'] - - if sensitive is None: - self.get_sensitive = (lambda X: X['Gender']) - elif (sensitive == ''): - self.get_sensitive = (lambda X: None) - else: - raise ValueError('Cannot handle sensitive '+sensitive+' in dataset '+dataset) - - - #National Longitudinal Survey of Youth 97 - elif (dataset == 'nlsy97'): - self.original_data = pd.read_csv( - "data/nlsy97/20151026/processed_output.csv", - names = ["PUBID.1997", "Gender", "Birth Year", "Census Region", - "Race", "Arrests", "Drug History", "Smoking History"], - sep=r'\s*,\s*', - engine='python', - quoting=2, - na_values="?") - del self.original_data['PUBID.1997'] - self.target_ix = 'Arrests' - self.sensitive_ix = sensitive - self.sup_ind = make_super_indices(self.original_data) - self.num_data = pd.get_dummies(self.original_data) - - #Define and dedup Target - self.target = (self.num_data['Arrests'] > 0)*1. 
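# The "dedup Gender" idiom above, in isolation (hypothetical frame): one-hot
# encode, keep a single binary column, then drop the redundant pair.
import pandas as pd

df = pd.DataFrame({'Gender': ['Male', 'Female'], 'Age': [30, 40]})
num = pd.get_dummies(df)             # -> Age, Gender_Female, Gender_Male
num['Gender'] = num['Gender_Male']   # keep one binary column
num = num.drop(['Gender_Female', 'Gender_Male'], axis=1)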
- self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis = 1) - del self.sup_ind[self.target_ix] - - #Dedup Gender - self.num_data['Gender'] = self.num_data['Gender_"Male"'] - self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis = 1) - self.sup_ind['Gender'] = ['Gender'] - - if sensitive is None or sensitive == 'Gender': - self.get_sensitive = (lambda X: X['Gender']) - elif (sensitive == 'Race'): - self.get_sensitive = (lambda X: X['Race_"Black"']) - else: - raise ValueError('Cannot handle sensitive '+sensitive+' in dataset '+dataset) - - - #German Datset (Incomplete) - elif (dataset == 'german'): - #http://programming-r-pro-bro.blogspot.com/2011/09/modelling-with-r-part-1.html - original_data = pd.read_csv( - "data/german/processed_output.csv", - names = ["PUBID.1997", "Gender", "Birth Year", "Census Region", - "Race", "Arrests", "Drug History", "Smoking History"], - sep=r'\s*,\s*', - engine='python', - na_values="?") - - elif exists(dataset): - print "loading new dataset %s" % dataset - - self.original_data = pd.read_csv(dataset) - - if target is None: - target = self.original_data.columns[-1] - self.target_ix = target - if self.target_ix not in self.original_data: - raise ValueError("unknown target feature %s" % self.target_ix) - - if sensitive is None: - sensitive = self.original_data.columns[0] - self.sensitive_ix = sensitive - if self.sensitive_ix not in self.original_data: - raise ValueError("unkown sensitive feature %s" % self.sensitive_ix) - - if self.sensitive_ix == self.target_ix: - print "WARNING: target and sensitive attributes are the same (%s), I'm unsure whether this tool handles this case correctly" % target - - nominal_cols = set(self.original_data.select_dtypes(include=['object']).columns) - - self.num_data = pd.get_dummies( - self.original_data, - prefix_sep='_', - columns=nominal_cols-set([target,sensitive])) - - self.num_data = self.num_data.apply(encode_nominal) - - self.sup_ind = make_super_indices(self.original_data) - - if self.target_ix in nominal_cols: - targets = len(set(self.original_data[target])) - if targets > 2: - print "WARNING: target feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (target, targets) - del self.sup_ind[self.target_ix] - # self.target_ix = "%s_%s" % (self.target_ix,self.original_data[self.target_ix][0]) - - if self.sensitive_ix in nominal_cols: - targets = len(set(self.original_data[sensitive])) - if targets > 2: - print "WARNING: sensitive feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (sensitive, targets) - self.sup_ind[self.sensitive_ix] = [self.sensitive_ix] - # self.sensitive_ix = "%s_%s" % (self.sensitive_ix,self.original_data[self.sensitive_ix][0]) - - self.target = self.num_data[self.target_ix] - self.num_data = self.num_data.drop([self.target_ix], axis = 1) - - self.get_sensitive = lambda X: X[self.sensitive_ix] - - print "target feature = %s" % self.target_ix - print "sensitive feature = %s" % self.sensitive_ix - - else: - raise ValueError("Unknown dataset %s" % dataset) - - def delete_index ( self, index ): - self.num_data.drop(self.sup_ind[index], axis = 1) - del self.sup_ind[index] - - -#Categorical features are encoded as binary features, one for each category -#A super index keeps track of the mapping between a feature and its binary representation -def make_super_indices( dataset ): - sup_ind = {} - for i in dataset.columns: - if dataset[i].dtype != 'O': - sup_ind[i] = [i] - else: - unique = 
filter(lambda v: v==v, dataset[i].unique()) - sup_ind[i] = [i + '_' + s for s in unique] - return sup_ind + def __init__(self, dataset, sensitive=None, target=None): + self.name = dataset + + # Warfarin dosage dataset + if (dataset == 'iwpc'): + self.num_data = pd.DataFrame.from_records( + arff.load('data/iwpc/iwpc_train_class.arff'), + columns=[ + 'index', 'race=black', 'race=asian', 'age', 'height', 'weight', 'amiodarone', + 'cyp2c9=13', 'cyp2c9=12', 'cyp2c9=23', 'cyp2c9=33', 'cyp2c9=22', + 'vkorc1=CT', 'vkorc1=TT', 'decr', 'dose' + ]) + self.sup_ind = {} + self.sup_ind['race'] = ['race=black', 'race=asian'] + self.sup_ind['age'] = ['age'] + self.sup_ind['height'] = ['height'] + self.sup_ind['weight'] = ['weight'] + self.sup_ind['amiodarone'] = ['amiodarone'] + self.sup_ind['cyp2c9'] = ['cyp2c9=13', 'cyp2c9=12', 'cyp2c9=23', 'cyp2c9=33', 'cyp2c9=22'] + self.sup_ind['vkorc1'] = ['vkorc1=CT', 'vkorc1=TT'] + self.sup_ind['decr'] = ['decr'] + self.sup_ind['dose'] = ['dose'] + self.target_ix = 'dose' + self.sensitive_ix = 'race=black' + if sensitive is None: + self.get_sensitive = (lambda X: X['race=black']) + + self.target = self.num_data['dose'] + self.num_data = self.num_data.drop(['index'], axis=1) + self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis=1) + del self.sup_ind['dose'] + + + # Adult dataset + elif (dataset == 'adult'): + self.original_data = pd.read_csv( + "data/adult/adult.data", + names=[ + "Age", "Workclass", "fnlwgt", "Education", "Education-Num", "Marital Status", + "Occupation", "Relationship", "Race", "Gender", "Capital Gain", "Capital Loss", + "Hours per week", "Country", "Target"], + sep=r'\s*,\s*', + engine='python', + na_values="?") + del self.original_data['fnlwgt'] + self.sup_ind = make_super_indices(self.original_data) + self.num_data = pd.get_dummies(self.original_data) + self.target_ix = 'Target' + self.sensitive_ix = sensitive + + # Define and dedup Target + self.target = self.num_data['Target_>50K'] + self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis=1) + del self.sup_ind['Target'] + + # Dedup Gender + self.num_data['Gender'] = self.num_data['Gender_Male'] + self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis=1) + self.sup_ind['Gender'] = ['Gender'] + + if sensitive is None: + self.get_sensitive = (lambda X: X['Gender']) + elif (sensitive == ''): + self.get_sensitive = (lambda X: None) + else: + raise ValueError('Cannot handle sensitive ' + sensitive + ' in dataset ' + dataset) + + + # National Longitudinal Survey of Youth 97 + elif (dataset == 'nlsy97'): + self.original_data = pd.read_csv( + "data/nlsy97/20151026/processed_output.csv", + names=["PUBID.1997", "Gender", "Birth Year", "Census Region", + "Race", "Arrests", "Drug History", "Smoking History"], + sep=r'\s*,\s*', + engine='python', + quoting=2, + na_values="?") + del self.original_data['PUBID.1997'] + self.target_ix = 'Arrests' + self.sensitive_ix = sensitive + self.sup_ind = make_super_indices(self.original_data) + self.num_data = pd.get_dummies(self.original_data) + + # Define and dedup Target + self.target = (self.num_data['Arrests'] > 0) * 1. 
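# Binarising the arrest count into a 0/1 float target, as in the nlsy97
# branch above (toy series):
import pandas as pd

arrests = pd.Series([0, 2, 0, 1])
target = (arrests > 0) * 1.   # -> 0.0, 1.0, 0.0, 1.0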
+            self.num_data = self.num_data.drop(self.sup_ind[self.target_ix], axis=1)
+            del self.sup_ind[self.target_ix]
+
+            # Dedup Gender
+            self.num_data['Gender'] = self.num_data['Gender_"Male"']
+            self.num_data = self.num_data.drop(self.sup_ind['Gender'], axis=1)
+            self.sup_ind['Gender'] = ['Gender']
+
+            if sensitive is None or sensitive == 'Gender':
+                self.get_sensitive = (lambda X: X['Gender'])
+            elif (sensitive == 'Race'):
+                self.get_sensitive = (lambda X: X['Race_"Black"'])
+            else:
+                raise ValueError('Cannot handle sensitive ' + sensitive + ' in dataset ' + dataset)
+
+
+        # German Dataset (Incomplete)
+        elif (dataset == 'german'):
+            # http://programming-r-pro-bro.blogspot.com/2011/09/modelling-with-r-part-1.html
+            original_data = pd.read_csv(
+                "data/german/processed_output.csv",
+                names=["PUBID.1997", "Gender", "Birth Year", "Census Region",
+                       "Race", "Arrests", "Drug History", "Smoking History"],
+                sep=r'\s*,\s*',
+                engine='python',
+                na_values="?")
+
+        elif exists(dataset):
+            print "loading new dataset %s" % dataset
+
+            self.original_data = pd.read_csv(dataset)
+
+            if target is None:
+                target = self.original_data.columns[-1]
+            self.target_ix = target
+            if self.target_ix not in self.original_data:
+                raise ValueError("unknown target feature %s" % self.target_ix)
+
+            if sensitive is None:
+                sensitive = self.original_data.columns[0]
+            self.sensitive_ix = sensitive
+            if self.sensitive_ix not in self.original_data:
+                raise ValueError("unknown sensitive feature %s" % self.sensitive_ix)
+
+            if self.sensitive_ix == self.target_ix:
+                print "WARNING: target and sensitive attributes are the same (%s), I'm unsure whether this tool handles this case correctly" % target
+
+            nominal_cols = set(self.original_data.select_dtypes(include=['object']).columns)
+
+            self.num_data = pd.get_dummies(
+                self.original_data,
+                prefix_sep='_',
+                columns=nominal_cols - set([target, sensitive]))
+
+            self.num_data = self.num_data.apply(encode_nominal)
+
+            self.sup_ind = make_super_indices(self.original_data)
+
+            if self.target_ix in nominal_cols:
+                targets = len(set(self.original_data[target]))
+                if targets > 2:
+                    print "WARNING: target feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (
+                        target, targets)
+                del self.sup_ind[self.target_ix]
+                # self.target_ix = "%s_%s" % (self.target_ix,self.original_data[self.target_ix][0])
+
+            if self.sensitive_ix in nominal_cols:
+                targets = len(set(self.original_data[sensitive]))
+                if targets > 2:
+                    print "WARNING: sensitive feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (
+                        sensitive, targets)
+                self.sup_ind[self.sensitive_ix] = [self.sensitive_ix]
+                # self.sensitive_ix = "%s_%s" % (self.sensitive_ix,self.original_data[self.sensitive_ix][0])
+
+            self.target = self.num_data[self.target_ix]
+            self.num_data = self.num_data.drop([self.target_ix], axis=1)
+
+            self.get_sensitive = lambda X: X[self.sensitive_ix]
+
+            print "target feature = %s" % self.target_ix
+            print "sensitive feature = %s" % self.sensitive_ix
+
+        else:
+            raise ValueError("Unknown dataset %s" % dataset)
+
+    def delete_index(self, index):
+        # assign the result back: DataFrame.drop does not modify in place
+        self.num_data = self.num_data.drop(self.sup_ind[index], axis=1)
+        del self.sup_ind[index]
+
+
+# Categorical features are encoded as binary features, one for each category
+# A super index keeps track of the mapping between a feature and its binary representation
+def make_super_indices(dataset):
+    sup_ind = {}
+    for i in dataset.columns:
+        if dataset[i].dtype != 'O':
+            sup_ind[i] = [i]
+        else:
+
+# Categorical features are encoded as binary features, one for each category
+# A super index keeps track of the mapping between a feature and its binary representation
+def make_super_indices(dataset):
+    sup_ind = {}
+    for i in dataset.columns:
+        if dataset[i].dtype != 'O':
+            sup_ind[i] = [i]
+        else:
+            unique = filter(lambda v: v == v, dataset[i].unique())
+            sup_ind[i] = [i + '_' + s for s in unique]
+    return sup_ind
 
 
 ## Parse arguments
 def get_arguments():
-	parser = argparse.ArgumentParser()
-	parser.add_argument('dataset', help='Name of dataset used')
-	parser.add_argument('-m', '--measure',
-		default='average-unary-individual',
-		help='Quantity of interest',
-		choices=['average-unary-individual','unary-individual',
-			'discrim', 'banzhaf', 'shapley', 'average-unary-class'])
-	parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field')
-	parser.add_argument('-t', '--target', default=None, help='Target field', type=str)
-
-	parser.add_argument('-e', '--erase-sensitive', action='store_false', help='Erase sensitive field from dataset')
-	parser.add_argument('-p', '--show-plot', action='store_true', help='Output plot as pdf')
-	parser.add_argument('-o', '--output-pdf', action='store_true', help='Output plot as pdf')
-	parser.add_argument('-c', '--classifier', default='logistic', help='Classifier to use',
-		choices=['logistic', 'svm', 'decision-tree', 'decision-forest'])
-
-	parser.add_argument('--max_depth', type=int, default=2, help='Max depth for decision trees and forests')
-	parser.add_argument('--n_estimators', type=int, default=20, help='Number of trees for decision forests')
-	parser.add_argument('--seed', default=None, help='Random seed, auto seeded if not specified', type=int)
-
-	parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report')
-	parser.add_argument('-r', '--record-counterfactuals', action='store_true', help='Store counterfactual pairs for causal analysis')
-	parser.add_argument('-a', '--active-iterations', type=int, default=10, help='Active Learning Iterations')
-	parser.add_argument('-q', '--class_influence', default=None, type=int, help='Index of the the target class for causal analysis')
-
-	args = parser.parse_args()
-	if args.seed is not None:
-		numpy.random.seed([args.seed])
-
-	return args
+    parser = argparse.ArgumentParser()
+    parser.add_argument('dataset', help='Name of dataset used')
+    parser.add_argument('-m', '--measure',
+                        default='average-unary-individual',
+                        help='Quantity of interest',
+                        choices=['average-unary-individual', 'unary-individual',
+                                 'discrim', 'banzhaf', 'shapley', 'average-unary-class'])
+    parser.add_argument('-s', '--sensitive', default=None, help='Sensitive field')
+    parser.add_argument('-t', '--target', default=None, help='Target field', type=str)
+
+    parser.add_argument('-e', '--erase-sensitive', action='store_false', help='Erase sensitive field from dataset')
+    parser.add_argument('-p', '--show-plot', action='store_true', help='Show plot interactively')
+    parser.add_argument('-o', '--output-pdf', action='store_true', help='Output plot as pdf')
+    parser.add_argument('-c', '--classifier', default='logistic', help='Classifier to use',
+                        choices=['logistic', 'svm', 'decision-tree', 'decision-forest'])
+
+    parser.add_argument('--max_depth', type=int, default=2, help='Max depth for decision trees and forests')
+    parser.add_argument('--n_estimators', type=int, default=20, help='Number of trees for decision forests')
+    parser.add_argument('--seed', default=None, help='Random seed, auto seeded if not specified', type=int)
+
+    parser.add_argument('-i', '--individual', default=0, type=int, help='Index for Individualized Transparency Report')
+    parser.add_argument('-r', '--record-counterfactuals', action='store_true',
+                        help='Store counterfactual pairs for causal analysis')
+    parser.add_argument('-a', '--active-iterations', type=int, default=10, help='Active Learning Iterations')
+    parser.add_argument('-q', '--class_influence', default=None, type=int,
+                        help='Index of the target class for causal analysis')
+
+    args = parser.parse_args()
+    if args.seed is not None:
+        numpy.random.seed([args.seed])
+
+    return args
+
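+# Thin argparse.Namespace bundling a trained classifier with the data splits
+# the QII routines need, so they can be passed around as one object.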
 class Setup(argparse.Namespace):
-	def __init__(self, cls, x_test, y_test, x_target_class, sens_test, **kw):
-		self.cls = cls
-		self.x_test = x_test
-		self.y_test = y_test
-		self.x_target_class = x_target_class
-		self.sens_test = sens_test
-		#for k in kw:
-		#	self.__setattr__(k, kw[k])
-		argparse.Namespace.__init__(self, **kw)
+    def __init__(self, cls, x_test, y_test, x_target_class, sens_test, **kw):
+        self.cls = cls
+        self.x_test = x_test
+        self.y_test = y_test
+        self.x_target_class = x_target_class
+        self.sens_test = sens_test
+        # for k in kw:
+        #     self.__setattr__(k, kw[k])
+        argparse.Namespace.__init__(self, **kw)
+
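+# Split the data 40/60 into train/test, optionally standardize both splits
+# with a scaler fit on the training data only, then fit the chosen classifier.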
 def split_and_train_classifier(args, dataset, scaler=None, normalize=True):
-	classifier = args.classifier
-	## Split data into training and test data
-	x_train, x_test, y_train, y_test = cross_validation.train_test_split(
-		dataset.num_data, dataset.target,
-		train_size=0.40, random_state=100
-	)
-
-	x_target_class = None
-	if args.class_influence is not None:
-		target_class_type = type(y_test.iloc[0])
-		target_class = target_class_type(args.class_influence)
-		x_target_class = x_test[y_test == target_class]
-
-	sens_train = dataset.get_sensitive(x_train)
-	sens_test = dataset.get_sensitive(x_test)
-	if normalize:
-		if (scaler == None):
-			# Initialize scaler to normalize training data
-			scaler = preprocessing.StandardScaler()
-			scaler.fit(x_train)
-
-		# Normalize all training and test data
-		x_train = pd.DataFrame(scaler.transform(x_train), columns=(dataset.num_data.columns))
-		x_test = pd.DataFrame(scaler.transform(x_test), columns=(dataset.num_data.columns))
-		if x_target_class is not None:
-			x_target_class = pd.DataFrame(scaler.transform(x_target_class), columns=(dataset.num_data.columns))
-
-	cls = train_classifier(args, x_train, y_train)
-
-	return Setup(cls = cls,
-		scaler = scaler,
-		x_train = x_train,
-		x_test = x_test,
-		y_train = y_train,
-		y_test = y_test,
-		x_target_class = x_target_class,
-		sens_train = sens_train,
-		sens_test = sens_test)
+    classifier = args.classifier
+    ## Split data into training and test data
+    x_train, x_test, y_train, y_test = cross_validation.train_test_split(
+        dataset.num_data, dataset.target,
+        train_size=0.40, random_state=100
+    )
+
+    x_target_class = None
+    if args.class_influence is not None:
+        target_class_type = type(y_test.iloc[0])
+        target_class = target_class_type(args.class_influence)
+        x_target_class = x_test[y_test == target_class]
+
+    sens_train = dataset.get_sensitive(x_train)
+    sens_test = dataset.get_sensitive(x_test)
+    if normalize:
+        if (scaler == None):
+            # Initialize scaler to normalize training data
+            scaler = preprocessing.StandardScaler()
+            scaler.fit(x_train)
+
+        # Normalize all training and test data
+        x_train = pd.DataFrame(scaler.transform(x_train), columns=(dataset.num_data.columns))
+        x_test = pd.DataFrame(scaler.transform(x_test), columns=(dataset.num_data.columns))
+        if x_target_class is not None:
+            x_target_class = pd.DataFrame(scaler.transform(x_target_class), columns=(dataset.num_data.columns))
+
+    cls = train_classifier(args, x_train, y_train)
+
+    return Setup(cls=cls,
+                 scaler=scaler,
+                 x_train=x_train,
+                 x_test=x_test,
+                 y_train=y_train,
+                 y_test=y_test,
+                 x_target_class=x_target_class,
+                 sens_train=sens_train,
+                 sens_test=sens_test)
 
 
 def train_classifier(args, X_train, y_train):
-	classifier = args.classifier
-	# Initialize sklearn classifier model
-	if (classifier == 'logistic'):
-		import sklearn.linear_model as linear_model
-		cls = linear_model.LogisticRegression()
-	elif (classifier == 'svm'):
-		from sklearn import svm
-		cls = svm.SVC(kernel='linear', cache_size=7000,
-		)
-	elif (classifier == 'decision-tree'):
-		import sklearn.linear_model as linear_model
-		cls = tree.DecisionTreeClassifier(max_depth=args.max_depth,
-		)
-	elif (classifier == 'decision-forest'):
-		from sklearn.ensemble import GradientBoostingClassifier
-		cls = GradientBoostingClassifier(n_estimators=args.n_estimators,
-			learning_rate=1.0,
-			max_depth=args.max_depth,
-			random_state=100
-		)
-
-	#Train sklearn model
-	cls.fit(X_train, y_train)
-	return cls
-
+    classifier = args.classifier
+    # Initialize sklearn classifier model
+    if (classifier == 'logistic'):
+        import sklearn.linear_model as linear_model
+        cls = linear_model.LogisticRegression()
+    elif (classifier == 'svm'):
+        from sklearn import svm
+        cls = svm.SVC(kernel='linear', cache_size=7000,
+                      )
+    elif (classifier == 'decision-tree'):
+        import sklearn.linear_model as linear_model
+        cls = tree.DecisionTreeClassifier(max_depth=args.max_depth,
+                                          )
+    elif (classifier == 'decision-forest'):
+        from sklearn.ensemble import GradientBoostingClassifier
+        cls = GradientBoostingClassifier(n_estimators=args.n_estimators,
+                                         learning_rate=1.0,
+                                         max_depth=args.max_depth,
+                                         random_state=100
+                                         )
+
+    # Train sklearn model
+    cls.fit(X_train, y_train)
+    return cls
 
 
 def plot_series(series, args, xlabel, ylabel):
-	# plt.figure(figsize=(5, 4))
-	plt.ioff()
-	plt.figure(figsize=(10, 10))
-	series.sort_values(inplace=True, ascending=False)
-	# average_local_inf_series.plot(kind="bar", facecolor='#ff9999', edgecolor='white')
-	series.plot(kind="bar")
-	plt.xticks(rotation=45, ha='right', size='small')
-	plt.xlabel(xlabel, labelfont)
-	plt.ylabel(ylabel, labelfont)
-	plt.tight_layout()
-	if (args.output_pdf == True):
-		class_value = str(args.class_influence) if args.class_influence is not None else ''
-		pp = PdfPages('figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + class_value +'.pdf')
-		print ('Writing to figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + class_value + '.pdf')
-		pp.savefig(bbox_inches='tight')
-		pp.close()
-	if (args.show_plot == True):
-		plt.show()
+    plt.ioff()
+    plt.figure(figsize=(10, 10))
+    series.sort_values(inplace=True, ascending=False)
+    # average_local_inf_series.plot(kind="bar", facecolor='#ff9999', edgecolor='white')
+    series.plot(kind="bar")
+    plt.xticks(rotation=45, ha='right', size='small')
+    plt.xlabel(xlabel, labelfont)
+    plt.ylabel(ylabel, labelfont)
+    plt.tight_layout()
+    if (args.output_pdf == True):
+        class_value = str(args.class_influence) if args.class_influence is not None else ''
+        pp = PdfPages('figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + class_value + '.pdf')
+        print ('Writing to figure-' + args.measure + '-' + args.dataset + '-' + args.classifier + class_value + '.pdf')
+        pp.savefig(bbox_inches='tight')
+        pp.close()
+    if (args.show_plot == True):
+        plt.show()
 
 
 def plot_series_with_baseline(series, args, xlabel, ylabel, baseline):
-	series.sort(ascending = True)
-	plt.figure(figsize=(5,4))
-	#plt.bar(range(series.size), series.as_matrix() - baseline)
-	#(series - baseline).plot(kind="bar", facecolor='#ff9999', edgecolor='white')
-	(series - baseline).plot(kind="bar")
-	#plt.xticks(range(series.size), series.keys(), size='small')
-	x1,x2,y1,y2 = plt.axis()
-	X = range(series.size)
-	for x,y in zip(X,series.as_matrix() - baseline):
-		x_wd = 1. / series.size
-		if(y < 0):
-			plt.text(x+x_wd/2, y-0.01, '%.2f' % (y), ha='center', va= 'bottom', size='small')
-		else:
-			plt.text(x+x_wd/2, y+0.01, '%.2f' % (y), ha='center', va= 'top', size='small')
-	plt.axis((x1,x2,-baseline,y2 + 0.01))
-	plt.xticks(rotation = 45, ha = 'right', size='small')
-	plt.gca().yaxis.set_major_formatter(mtick.FuncFormatter(lambda x,_: '%1.2f' % (x + baseline)))
-	plt.axhline(linestyle = 'dashed', color = 'black')
-	plt.text(x_wd, 0, 'Original Discrimination', ha = 'left', va = 'bottom')
-	plt.xlabel(xlabel, labelfont)
-	plt.ylabel(ylabel, labelfont)
-	plt.tight_layout()
-	if (args.output_pdf == True):
-		pp = PdfPages('figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf')
-		print ('Writing to figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf')
-		pp.savefig()
-		pp.close()
-	plt.show()
+    series.sort(ascending=True)
+    plt.figure(figsize=(5, 4))
+    # plt.bar(range(series.size), series.as_matrix() - baseline)
+    # (series - baseline).plot(kind="bar", facecolor='#ff9999', edgecolor='white')
+    (series - baseline).plot(kind="bar")
+    # plt.xticks(range(series.size), series.keys(), size='small')
+    x1, x2, y1, y2 = plt.axis()
+    X = range(series.size)
+    for x, y in zip(X, series.as_matrix() - baseline):
+        x_wd = 1. / series.size
+        if (y < 0):
+            plt.text(x + x_wd / 2, y - 0.01, '%.2f' % (y), ha='center', va='bottom', size='small')
+        else:
+            plt.text(x + x_wd / 2, y + 0.01, '%.2f' % (y), ha='center', va='top', size='small')
+    plt.axis((x1, x2, -baseline, y2 + 0.01))
+    plt.xticks(rotation=45, ha='right', size='small')
+    plt.gca().yaxis.set_major_formatter(mtick.FuncFormatter(lambda x, _: '%1.2f' % (x + baseline)))
+    plt.axhline(linestyle='dashed', color='black')
+    plt.text(x_wd, 0, 'Original Discrimination', ha='left', va='bottom')
+    plt.xlabel(xlabel, labelfont)
+    plt.ylabel(ylabel, labelfont)
+    plt.tight_layout()
+    if (args.output_pdf == True):
+        pp = PdfPages(
+            'figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf')
+        print (
+        'Writing to figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf')
+        pp.savefig()
+        pp.close()
+    plt.show()
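+
+
+# Report the test error together with several sensitive-attribute association
+# measures: discrimination, Pearson correlation, Jaccard score and normalized
+# mutual information.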
 def measure_analytics(dataset, cls, X, y, sens=None):
-	y_pred = cls.predict(X)
-
-	error_rate = numpy.mean((y_pred != y)*1.)
-	print('test error rate: %.3f' % error_rate)
+    y_pred = cls.predict(X)
 
-	discrim0 = discrim(numpy.array(X), cls, numpy.array(sens))
-	print('Initial Discrimination: %.3f' % discrim0)
+    error_rate = numpy.mean((y_pred != y) * 1.)
+    print('test error rate: %.3f' % error_rate)
 
-	from scipy.stats.stats import pearsonr
-	corr0 = pearsonr(sens, y)[0]
-	print('Correlation: %.3f' % corr0)
+    discrim0 = discrim(numpy.array(X), cls, numpy.array(sens))
+    print('Initial Discrimination: %.3f' % discrim0)
 
-	ji = metrics.jaccard_similarity_score(y, sens)
-	print('JI: %.3f' % ji)
+    from scipy.stats.stats import pearsonr
+    corr0 = pearsonr(sens, y)[0]
+    print('Correlation: %.3f' % corr0)
 
-	mi = metrics.normalized_mutual_info_score(y, sens)
-	print('MI: %.3f' % mi)
+    ji = metrics.jaccard_similarity_score(y, sens)
+    print('JI: %.3f' % ji)
 
+    mi = metrics.normalized_mutual_info_score(y, sens)
+    print('MI: %.3f' % mi)
diff --git a/qii_lib.py b/qii_lib.py
index ea6b7ec..4296aa1 100644
--- a/qii_lib.py
+++ b/qii_lib.py
@@ -56,7 +56,7 @@ def get_histogram_bins(X, cols, num_bins=40):
     col = cols[0]
    column_values = X[:, col]
     _, bin_edges, binned_indices = binned_statistic(column_values, numpy.ones(len(column_values)), statistic='sum',
-                                                            bins=num_bins)
+                                                    bins=num_bins)
 
     return binned_indices, bin_edges
 
@@ -178,15 +178,15 @@ def get_feature_variations(features_list, dataset, cls, X, target_class_X, class
     indices = features_list.reset_index()['index']
     for sf in indices:
         ls = [f_columns.get_loc(f) for f in sup_ind[sf]]
-        binned_indices, bin_edges = get_histogram_bins(numpy.array(X),ls,num_bins=bins)
-        feature_dataframe = pd.DataFrame({'bin_edges' : bin_edges[0:-1]})
+        binned_indices, bin_edges = get_histogram_bins(numpy.array(X), ls, num_bins=bins)
+        feature_dataframe = pd.DataFrame({'bin_edges': bin_edges[0:-1]})
         feature_dataframe['class'] = class_name
         feature_dataframe['feature'] = sf
         influences = []
         for bin in xrange(0, bins):
             local_influence = numpy.zeros(y_pred.shape[0])
             for iter in xrange(0, iters):
-                X_inter = yield_increasing_bins(numpy.array(X), numpy.array(target_class_X),binned_indices, ls, bin)
+                X_inter = yield_increasing_bins(numpy.array(X), numpy.array(target_class_X), binned_indices, ls, bin)
                 if X_inter is not None:
                     y_pred_inter = cls.predict(X_inter)
                     local_influence = local_influence + (y_pred == y_pred_inter) * 1.
@@ -218,7 +218,7 @@ def unary_individual_influence(dataset, cls, x_ind, X):
             counterfactuals[sf][1][i * n:(i + 1) * n] = X_inter
 
         average_local_inf[sf] = 1 - (local_influence / iters).mean()
-	# print('Influence %s: %.3f' % (sf, average_local_inf[sf]))
+        # print('Influence %s: %.3f' % (sf, average_local_inf[sf]))
 
     return (average_local_inf, counterfactuals)
 
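For orientation between patches: the @@ -218 hunk above touches the core unary QII estimate, average_local_inf[sf] = 1 - (local_influence / iters).mean(), i.e. a feature's influence is the estimated probability that randomly re-sampling that feature changes the model's prediction. A minimal self-contained sketch of that estimator, with a toy thresholding predict function standing in for the trained classifier (the names here are illustrative, not the repository's API):

    import numpy

    def unary_influence(predict, X, col, iters=30):
        # Chance that resampling column `col` from its marginal flips the prediction.
        y_base = predict(X)
        unchanged = numpy.zeros(X.shape[0])
        for _ in xrange(iters):
            X_inter = X.copy()
            # intervene: replace the column with a random permutation of itself
            X_inter[:, col] = numpy.random.permutation(X_inter[:, col])
            unchanged += (predict(X_inter) == y_base) * 1.
        return 1 - (unchanged / iters).mean()

    X = numpy.random.rand(100, 3)
    print unary_influence(lambda A: (A[:, 0] > 0.5) * 1, X, col=0)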

From 457d858504b66028e1182f392c37198f28d0b8aa Mon Sep 17 00:00:00 2001
From: Rajkiran
Date: Tue, 28 Nov 2017 19:17:21 -0800
Subject: [PATCH 29/31] trying to fix formatting

---
 ml_util.py | 8 ++++----
 qii.py     | 1 -
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/ml_util.py b/ml_util.py
index 6d6a1ca..6338d4d 100644
--- a/ml_util.py
+++ b/ml_util.py
@@ -224,7 +224,7 @@ def __init__(self, dataset, sensitive=None, target=None):
                 targets = len(set(self.original_data[target]))
                 if targets > 2:
                     print "WARNING: target feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (
-                        target, targets)
+                    target, targets)
                 del self.sup_ind[self.target_ix]
                 # self.target_ix = "%s_%s" % (self.target_ix,self.original_data[self.target_ix][0])
 
@@ -232,9 +232,9 @@ def __init__(self, dataset, sensitive=None, target=None):
                 targets = len(set(self.original_data[sensitive]))
                 if targets > 2:
                     print "WARNING: sensitive feature %s has more than 2 values (it has %d), I'm unsure whether this tool handles that correctly" % (
-                        sensitive, targets)
+                    sensitive, targets)
                 self.sup_ind[self.sensitive_ix] = [self.sensitive_ix]
-                # self.sensitive_ix = "%s_%s" % (self.sensitive_ix,self.original_data[self.sensitive_ix][0])
+            # self.sensitive_ix = "%s_%s" % (self.sensitive_ix,self.original_data[self.sensitive_ix][0])
 
             self.target = self.num_data[self.target_ix]
             self.num_data = self.num_data.drop([self.target_ix], axis=1)
@@ -428,7 +428,7 @@ def plot_series_with_baseline(series, args, xlabel, ylabel, baseline):
         pp = PdfPages(
             'figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf')
         print (
-        'Writing to figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf')
+            'Writing to figure-' + args.measure + '-' + args.dataset.name + '-' + args.dataset.sensitive_ix + '-' + args.classifier + '.pdf')
         pp.savefig()
         pp.close()
         plt.show()
diff --git a/qii.py b/qii.py
index 56e30f2..a9df240 100644
--- a/qii.py
+++ b/qii.py
@@ -134,7 +134,6 @@ def eval_class_average_unary(dataset, args, dat):
 
 
 def get_feature_variation_plots(features_list, dataset, args, dat):
-
     def plot_histogram(dataframe):
         data = dataframe.copy()
         data = data.drop(['feature', 'class'], axis=1)
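The next patch adds, for each feature, one figure overlaying that feature's histogram per class. In isolation, the pattern it wires into get_feature_variation_plots looks roughly like this (toy frame; the column name is made up for illustration):

    import numpy
    import pandas as pd
    import matplotlib.pyplot as plt

    temp = pd.DataFrame({'feature_a': numpy.random.randn(200)})
    temp['class'] = numpy.random.randint(0, 2, 200)

    plt.figure()
    for class_index, class_group in temp.groupby(['class']):
        # one histogram per class value, drawn on shared axes
        plt.hist(class_group['feature_a'], label=str(class_index))
    plt.legend(loc='best')
    plt.title('Combined Histogram feature_a')
    plt.show()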

From 0e1648c2903c521c98c5ecb89da589a8e7cb5465 Mon Sep 17 00:00:00 2001
From: Rajkiran
Date: Tue, 12 Dec 2017 00:36:40 -0800
Subject: [PATCH 30/31] adding plots for histograms

---
 qii.py | 40 +++++++++++++++++++++++++++++++++++++---
 1 file changed, 37 insertions(+), 3 deletions(-)

diff --git a/qii.py b/qii.py
index a9df240..6c47d57 100644
--- a/qii.py
+++ b/qii.py
@@ -3,7 +3,7 @@
 
 author: mostly Shayak
 """
-
+import pdb
 import time
 import pandas as pd
 import numpy
@@ -141,15 +141,49 @@ def plot_histogram(dataframe):
         data.hist()
         del data
 
+    x_test = dat.x_test.reset_index(drop=True)
+    y_test = dat.y_test.reset_index(drop=True)
+    temp = x_test.copy()
+    temp['class'] = y_test
+    features = numpy.array(features_list.keys())
+    for feature in features:
+        plt.figure()
+        # bins = numpy.unique(temp[feature])
+        for class_index, class_group in temp.groupby(['class']):
+            # plt.hist(class_group[feature], bins=bins, label=str(class_index))
+            plt.hist(class_group[feature], label=str(class_index))
+        plt.legend(loc='best')
+        plt.title('Combined Histogram ' + str(feature))
+        if args.output_pdf:
+            pp = PdfPages('Combined Histogram-' + str(feature) + '-' + args.classifier + '.pdf')
+            print ('Writing to Combined Histogram-' + str(feature) + '-' + args.classifier + '.pdf')
+            pp.savefig(bbox_inches='tight')
+            pp.close()
+        if args.show_plot:
+            plt.show()
+
+
     feature_variations = pd.DataFrame()
     for cls in dat.y_test.unique():
-        x_test = dat.x_test.reset_index(drop=True)
-        y_test = dat.y_test.reset_index(drop=True)
         x_target_class = x_test[y_test == cls]
         feature_variations = feature_variations.append(qii_lib.get_feature_variations(features_list, dataset,
                                                                                       dat.cls, dat.x_test,
                                                                                       x_target_class, cls))
+        # features = numpy.array(features_list.keys())
+        for feature in features:
+            plt.figure()
+            x_target_class[feature].hist()
+            plt.title(str(feature) + '-' + 'class_' + str(cls))
+            if args.output_pdf:
+                pp = PdfPages('Histogram-' + str(feature) + '-' + 'class_' + str(cls) + args.classifier + '.pdf')
+                print ('Writing to Histogram-' + str(feature) + '-' + 'class_' + str(cls) + args.classifier + '.pdf')
+                pp.savefig(bbox_inches='tight')
+                pp.close()
+            if args.show_plot:
+                plt.show()
+
 
     for index, group in feature_variations.groupby(['feature']):
         plt.figure()
         for class_index, class_group in group.groupby(['class']):

From 9026801878051718d5881000e0037e27ddcbe200 Mon Sep 17 00:00:00 2001
From: Rajkiran
Date: Tue, 12 Dec 2017 00:52:34 -0800
Subject: [PATCH 31/31] removing individual histograms

---
 qii.py | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/qii.py b/qii.py
index 6c47d57..70ef1ee 100644
--- a/qii.py
+++ b/qii.py
@@ -172,17 +172,17 @@ def plot_histogram(dataframe):
                                                                                       x_target_class, cls))
         # features = numpy.array(features_list.keys())
-        for feature in features:
-            plt.figure()
-            x_target_class[feature].hist()
-            plt.title(str(feature) + '-' + 'class_' + str(cls))
-            if args.output_pdf:
-                pp = PdfPages('Histogram-' + str(feature) + '-' + 'class_' + str(cls) + args.classifier + '.pdf')
-                print ('Writing to Histogram-' + str(feature) + '-' + 'class_' + str(cls) + args.classifier + '.pdf')
-                pp.savefig(bbox_inches='tight')
-                pp.close()
-            if args.show_plot:
-                plt.show()
+        # for feature in features:
+        #     plt.figure()
+        #     x_target_class[feature].hist()
+        #     plt.title(str(feature) + '-' + 'class_' + str(cls))
+        #     if args.output_pdf:
+        #         pp = PdfPages('Histogram-' + str(feature) + '-' + 'class_' + str(cls) + '_' + args.classifier + '.pdf')
+        #         print ('Writing to Histogram-' + str(feature) + '-' + 'class_' + str(cls) + '_' + args.classifier + '.pdf')
+        #         pp.savefig(bbox_inches='tight')
+        #         pp.close()
+        #     if args.show_plot:
+        #         plt.show()
 
     for index, group in feature_variations.groupby(['feature']):
         plt.figure()
        for class_index, class_group in group.groupby(['class']):
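Across the series, the super-index bookkeeping remains the bridge between original columns and their one-hot expansion. As a closing illustration, a small self-contained round trip on a toy frame (Python 2, matching the code above; the data is made up, and dict print order may vary):

    import pandas as pd

    def make_super_indices(dataset):
        # one entry per original column, listing the dummy columns it expands to
        sup_ind = {}
        for i in dataset.columns:
            if dataset[i].dtype != 'O':
                sup_ind[i] = [i]
            else:
                unique = filter(lambda v: v == v, dataset[i].unique())  # v == v drops NaN
                sup_ind[i] = [i + '_' + s for s in unique]
        return sup_ind

    df = pd.DataFrame({'Age': [39, 50, 38], 'Gender': ['Male', 'Female', 'Male']})
    print make_super_indices(df)
    # -> {'Age': ['Age'], 'Gender': ['Gender_Male', 'Gender_Female']}
    print pd.get_dummies(df).columns.tolist()
    # -> ['Age', 'Gender_Female', 'Gender_Male'] (get_dummies sorts the categories)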