From e10700e585f7673e959f20dac9c4fcdada1cc0aa Mon Sep 17 00:00:00 2001 From: yennanliu Date: Tue, 7 Nov 2023 11:13:59 +0800 Subject: [PATCH] add pq data structure, update cheatsheet --- README.md | 17 +- .../python/materials-queue/README.md | 251 ++++++++++++++++++ .../python/materials-queue/constraints.txt | 24 ++ .../message_brokers/kafka/consumer.py | 8 + .../message_brokers/kafka/docker-compose.yml | 22 ++ .../message_brokers/kafka/producer.py | 8 + .../message_brokers/rabbitmq/consumer.py | 19 ++ .../message_brokers/rabbitmq/producer.py | 14 + .../message_brokers/redis/publisher.py | 8 + .../message_brokers/redis/subscriber.py | 11 + .../python/materials-queue/requirements.txt | 9 + .../materials-queue/src/async_queues.py | 101 +++++++ .../python/materials-queue/src/graph.py | 155 +++++++++++ .../materials-queue/src/multiprocess_queue.py | 123 +++++++++ .../python/materials-queue/src/queues.py | 75 ++++++ .../python/materials-queue/src/roadmap.dot | 230 ++++++++++++++++ .../materials-queue/src/thread_safe_queues.py | 190 +++++++++++++ data_structure/python/pq_1.py | 32 +++ data_structure/python/pq_2.py | 81 ++++++ data_structure/python/pq_3.py | 44 +++ doc/cheatsheet/priority_queue.md | 11 +- 21 files changed, 1424 insertions(+), 9 deletions(-) create mode 100644 data_structure/python/materials-queue/README.md create mode 100644 data_structure/python/materials-queue/constraints.txt create mode 100644 data_structure/python/materials-queue/message_brokers/kafka/consumer.py create mode 100644 data_structure/python/materials-queue/message_brokers/kafka/docker-compose.yml create mode 100644 data_structure/python/materials-queue/message_brokers/kafka/producer.py create mode 100644 data_structure/python/materials-queue/message_brokers/rabbitmq/consumer.py create mode 100644 data_structure/python/materials-queue/message_brokers/rabbitmq/producer.py create mode 100644 data_structure/python/materials-queue/message_brokers/redis/publisher.py create mode 100644 
data_structure/python/materials-queue/message_brokers/redis/subscriber.py create mode 100644 data_structure/python/materials-queue/requirements.txt create mode 100644 data_structure/python/materials-queue/src/async_queues.py create mode 100644 data_structure/python/materials-queue/src/graph.py create mode 100644 data_structure/python/materials-queue/src/multiprocess_queue.py create mode 100644 data_structure/python/materials-queue/src/queues.py create mode 100644 data_structure/python/materials-queue/src/roadmap.dot create mode 100644 data_structure/python/materials-queue/src/thread_safe_queues.py create mode 100644 data_structure/python/pq_1.py create mode 100644 data_structure/python/pq_2.py create mode 100644 data_structure/python/pq_3.py diff --git a/README.md b/README.md index 72860e84..0341658f 100644 --- a/README.md +++ b/README.md @@ -146,18 +146,19 @@ ## Data Structure | # | Title | Solution | Use case | Comment | Status| | --- | ----- | -------- | ---- | ----- | ---- | -||Queue| [Python ](./data_structure/python/queue2.py), [JS](./data_structure/js/queue.js) | | | AGAIN*| -||Stack| [Python ](./data_structure/python/stack.py), [JS stack (via linkedlist)](./data_structure/js/stack_linkedlist.js), [JS - stack (via array)](./data_structure/js/stack_array.js) | | | OK| -||LinkedList| [Python](./data_structure/python/linkedList.py), [JS](./data_structure/js/linkedlist.js), [Java](./data_structure/java/LinkedinList.java) | | | OK**| +||Queue| [Py](./data_structure/python/queue2.py), [JS](./data_structure/js/queue.js) | | | AGAIN*| +||Stack| [Py](./data_structure/python/stack.py), [JS (linkedlist)](./data_structure/js/stack_linkedlist.js), [JS (array)](./data_structure/js/stack_array.js) | | | OK| +||LinkedList| [Py](./data_structure/python/linkedList.py), [JS](./data_structure/js/linkedlist.js), [Java](./data_structure/java/LinkedinList.java) | | | OK**| ||Doubly LinkedList| [Python](./data_structure/python/doublylinkedlist.py), 
[JS](./data_structure/js/doublylinkedList.js) | | | AGAIN| -||Tree| [Python ](./data_structure/python/tree.py) | | | AGAIN**| -||Trie| [Python ](./data_structure/python/trie.py) | | | AGAIN| +||Tree| [Py](./data_structure/python/tree.py) | | | AGAIN**| +||Trie| [Py](./data_structure/python/trie.py) | | | AGAIN| ||Heap| [heap.py](./data_structure/python/heap.py), [MinHeap.py](./data_structure/python/MinHeap.py), [MaxHeap.py](./data_structure/python/MaxHeap.py), [MinHeap.java](./leetcode_java/src/main/java/AlgorithmJava/MinHeap.java), [MaxHeap.java](./leetcode_java/src/main/java/AlgorithmJava/MaxHeap.java) | | | AGAIN| -||Array| [Python ](./data_structure/python/array.py) | | | AGAIN*| -||Graph| [Python ](./data_structure/python/graph.py), [JS](./data_structure/js/graph.js). [Java-graph](./algorithm/java/Graph.java), [Java-graph-client](./algorithm/java/GraphClient.java) | | | OK***| +||Array| [Py](./data_structure/python/array.py) | | | AGAIN*| +||Graph| [Py](./data_structure/python/graph.py), [JS](./data_structure/js/graph.js). 
[Java1](./algorithm/java/Graph.java), [Java2](./algorithm/java/GraphClient.java) | | | OK***| ||Binary search Tree (BST)| [Python](./data_structure/python/binary_search_tree.py), [JS](./data_structure/js/binary_search_tree.js), [Scala](./data_structure/scala/binarySearch.scala), [Java](./data_structure/java/BST.java) | | | AGAIN| -||Hash table| [Python](./data_structure/python/hash_table.py), [JS](./data_structure/js/hash_table.js) | usually for improving `time complexity B(O)` via extra space complexity (time-space tradeoff)|`good basic`| AGAIN****| +||Hash table| [Py](./data_structure/python/hash_table.py), [JS](./data_structure/js/hash_table.js) | usually for improving `time complexity B(O)` via extra space complexity (time-space tradeoff)|`good basic`| AGAIN****| ||DirectedEdge| [Java](./data_structure/java/DirectedEdge.java) | | | AGAIN| +||Priority Queue (PQ)| [Py 1](./data_structure/python/pq_1.py), [Py 2](./data_structure/python/pq_2.py), [Py 3](./data_structure/python/pq_3.py) | | | AGAIN| ## Algorithm diff --git a/data_structure/python/materials-queue/README.md b/data_structure/python/materials-queue/README.md new file mode 100644 index 00000000..a7b6f55c --- /dev/null +++ b/data_structure/python/materials-queue/README.md @@ -0,0 +1,251 @@ +# Python Stacks, Queues, and Priority Queues in Practice + +Sample code supplementing the tutorial on [Python queues](https://realpython.com/queue-in-python/) hosted on Real Python. 
+ +## Installation + +To get started, create and activate a new virtual environment, and then install the required dependencies into it: + +```shell +$ python3 -m venv venv/ --prompt=queue +$ source venv/bin/activate +(queue) $ python -m pip install -r requirements.txt -c constraints.txt +``` + +## Usage + +### Queue Implementation + +Change directory to `src/` and run the interactive Python interpreter: + +```shell +(queue) $ cd src/ +(queue) $ python -q +``` + +Then, import various queue data types from the `queues` module and start using them: + +```python +>>> from queues import Queue, Stack, PriorityQueue + +>>> fifo, stack, heap = Queue(), Stack(), PriorityQueue() +>>> for priority, element in enumerate(["1st", "2nd", "3rd"]): +... fifo.enqueue(element) +... stack.enqueue(element) +... heap.enqueue_with_priority(priority, element) + +>>> for elements in zip(fifo, stack, heap): +... print(elements) +... +('1st', '3rd', '3rd') +('2nd', '2nd', '2nd') +('3rd', '1st', '1st') +``` + +### Graph Algorithms + +Change directory to `src/` and run the interactive Python interpreter: + +```shell +(queue) $ cd src/ +(queue) $ python -q +``` + +Then, import various `graph` module members and start using them: + +```python +>>> from graph import * + +>>> nodes, graph = load_graph("roadmap.dot", City.from_dict) + +>>> city1 = nodes["london"] +>>> city2 = nodes["edinburgh"] + +>>> def distance(weights): +... return float(weights["distance"]) + +>>> for city in dijkstra_shortest_path(graph, city1, city2, distance): +... print(city.name) +... +City of London +St Albans +Coventry +Birmingham +Stoke-on-Trent +Manchester +Salford +Preston +Lancaster +Carlisle +Edinburgh + +>>> for city in shortest_path(graph, city1, city2): +... print(city.name) +... +City of London +Bristol +Newport +St Asaph +Liverpool +Preston +Lancaster +Carlisle +Edinburgh + +>>> connected(graph, city1, city2) +True + +>>> def is_twentieth_century(city): +... 
return city.year and 1901 <= city.year <= 2000 + +>>> breadth_first_search(graph, city2, is_twentieth_century) +City( + name='Lancaster', + country='England', + year=1937, + latitude=54.047, + longitude=-2.801 +) + +>>> depth_first_search(graph, city2, is_twentieth_century) +City( + name='Lancaster', + country='England', + year=1937, + latitude=54.047, + longitude=-2.801 +) +``` + +### Thread-Safe Queues + +Change directory to `src/` and run the script with optional parameters. For example: + +```shell +(queue) $ cd src/ +(queue) $ python thread_safe_queues.py --queue fifo \ + --producers 3 \ + --consumers 2 \ + --producer-speed 1 \ + --consumer-speed 1 +``` + +**Parameters:** + +| Short Name | Long Name | Value | +|-----------:|-------------------:|------------------------| +| `-q` | `--queue` | `fifo`, `lifo`, `heap` | +| `-p` | `--producers` | number | +| `-c` | `--consumers` | number | +| `-ps` | `--producer-speed` | number | +| `-cs` | `--consumer-speed` | number | + +### Asynchronous Queues + +Change directory to `src/` and run the script with a mandatory URL and optional parameters: + +```shell +(queue) $ cd src/ +(queue) $ python async_queues.py http://localhost:8000/ --max-depth 2 \ + --num-workers 3 +``` + +**Parameters:** + +| Short Name | Long Name | Value | +|-----------:|----------------:|--------| +| `-d` | `--max-depth` | number | +| `-w` | `--num-workers` | number | + +Note that to change between the available queue types, you'll need to edit your `main()` coroutine function: + +```python +# async_queues.py + +# ... + +async def main(args): + session = aiohttp.ClientSession() + try: + links = Counter() + queue = asyncio.Queue() + # queue = asyncio.LifoQueue() + # queue = asyncio.PriorityQueue() + +# ... 
+``` + +### Multiprocessing Queue + +Change directory to `src/` and run the script with a mandatory MD5 hash value and optional parameters: + +```shell +(queue) $ cd src/ +(queue) $ python multiprocess_queue.py a9d1cbf71942327e98b40cf5ef38a960 -m 6 -w 4 +``` + +**Parameters:** + +| Short Name | Long Name | Value | +|-----------:|----------------:|--------| +| `-m` | `--max-length` | number | +| `-w` | `--num-workers` | number | + +The maximum length determines the maximum number of characters in a text to guess. If you skip the number of workers, then the script will create as many of them as the number of CPU cores detected. + +### Message Brokers + +#### RabbitMQ + +Start a RabbitMQ broker with Docker: + +```shell +$ docker run -it --rm --name rabbitmq -p 5672:5672 rabbitmq +``` + +Open separate terminal windows, activate your virtual environment, change directory to `message_brokers/rabbitmq/`, and run your producer and consumer scripts: + +```shell +(queue) $ cd message_brokers/rabbitmq/ +(queue) $ python producer.py +(queue) $ python consumer.py +``` + +You can have as many producers and consumers as you like. + +#### Redis + +Start a Redis server with Docker: + +```shell +$ docker run -it --rm --name redis -p 6379:6379 redis +``` + +Open separate terminal windows, activate your virtual environment, change directory to `message_brokers/redis/`, and run your publisher and subscriber scripts: + +```shell +(queue) $ cd message_brokers/redis/ +(queue) $ python publisher.py +(queue) $ python subscriber.py +``` + +You can have as many publishers and subscribers as you like. 
+ +#### Apache Kafka + +Change directory to `message_brokers/kafka/` and start an Apache Kafka cluster with Docker Compose: + +```shell +$ cd message_brokers/kafka/ +$ docker-compose up +``` + +Open separate terminal windows, activate your virtual environment, change directory to `message_brokers/kafka/`, and run your producer and consumer scripts: + +```shell +(queue) $ cd message_brokers/kafka/ +(queue) $ python producer.py +(queue) $ python consumer.py +``` + +You can have as many producers and consumers as you like. diff --git a/data_structure/python/materials-queue/constraints.txt b/data_structure/python/materials-queue/constraints.txt new file mode 100644 index 00000000..47815c1c --- /dev/null +++ b/data_structure/python/materials-queue/constraints.txt @@ -0,0 +1,24 @@ +aiohttp==3.8.1 +aiosignal==1.2.0 +async-timeout==4.0.2 +attrs==21.4.0 +beautifulsoup4==4.11.1 +charset-normalizer==2.1.0 +commonmark==0.9.1 +Deprecated==1.2.13 +frozenlist==1.3.0 +idna==3.3 +kafka-python3==3.0.0 +multidict==6.0.2 +networkx==2.8.4 +packaging==21.3 +pika==1.2.1 +pydot==1.4.2 +Pygments==2.12.0 +pygraphviz==1.9 +pyparsing==3.0.9 +redis==4.3.3 +rich==12.4.4 +soupsieve==2.3.2.post1 +wrapt==1.14.1 +yarl==1.7.2 diff --git a/data_structure/python/materials-queue/message_brokers/kafka/consumer.py b/data_structure/python/materials-queue/message_brokers/kafka/consumer.py new file mode 100644 index 00000000..1a1a534a --- /dev/null +++ b/data_structure/python/materials-queue/message_brokers/kafka/consumer.py @@ -0,0 +1,8 @@ +# consumer.py + +from kafka3 import KafkaConsumer + +consumer = KafkaConsumer("datascience") +for record in consumer: + message = record.value.decode("utf-8") + print(f"Got message: {message}") diff --git a/data_structure/python/materials-queue/message_brokers/kafka/docker-compose.yml b/data_structure/python/materials-queue/message_brokers/kafka/docker-compose.yml new file mode 100644 index 00000000..215b61ed --- /dev/null +++ 
b/data_structure/python/materials-queue/message_brokers/kafka/docker-compose.yml @@ -0,0 +1,22 @@ +# docker-compose.yml + +version: "3" +services: + zookeeper: + image: 'bitnami/zookeeper:latest' + ports: + - '2181:2181' + environment: + - ALLOW_ANONYMOUS_LOGIN=yes + kafka: + image: 'bitnami/kafka:latest' + ports: + - '9092:9092' + environment: + - KAFKA_BROKER_ID=1 + - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092 + - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092 + - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181 + - ALLOW_PLAINTEXT_LISTENER=yes + depends_on: + - zookeeper diff --git a/data_structure/python/materials-queue/message_brokers/kafka/producer.py b/data_structure/python/materials-queue/message_brokers/kafka/producer.py new file mode 100644 index 00000000..f3d24d41 --- /dev/null +++ b/data_structure/python/materials-queue/message_brokers/kafka/producer.py @@ -0,0 +1,8 @@ +# producer.py + +from kafka3 import KafkaProducer + +producer = KafkaProducer(bootstrap_servers="localhost:9092") +while True: + message = input("Message: ") + producer.send(topic="datascience", value=message.encode("utf-8")) diff --git a/data_structure/python/materials-queue/message_brokers/rabbitmq/consumer.py b/data_structure/python/materials-queue/message_brokers/rabbitmq/consumer.py new file mode 100644 index 00000000..63d8f19e --- /dev/null +++ b/data_structure/python/materials-queue/message_brokers/rabbitmq/consumer.py @@ -0,0 +1,19 @@ +# consumer.py + +import pika + +QUEUE_NAME = "mailbox" + + +def callback(channel, method, properties, body): + message = body.decode("utf-8") + print(f"Got message: {message}") + + +with pika.BlockingConnection() as connection: + channel = connection.channel() + channel.queue_declare(queue=QUEUE_NAME) + channel.basic_consume( + queue=QUEUE_NAME, auto_ack=True, on_message_callback=callback + ) + channel.start_consuming() diff --git a/data_structure/python/materials-queue/message_brokers/rabbitmq/producer.py 
b/data_structure/python/materials-queue/message_brokers/rabbitmq/producer.py new file mode 100644 index 00000000..1a0146da --- /dev/null +++ b/data_structure/python/materials-queue/message_brokers/rabbitmq/producer.py @@ -0,0 +1,14 @@ +# producer.py + +import pika + +QUEUE_NAME = "mailbox" + +with pika.BlockingConnection() as connection: + channel = connection.channel() + channel.queue_declare(queue=QUEUE_NAME) + while True: + message = input("Message: ") + channel.basic_publish( + exchange="", routing_key=QUEUE_NAME, body=message.encode("utf-8") + ) diff --git a/data_structure/python/materials-queue/message_brokers/redis/publisher.py b/data_structure/python/materials-queue/message_brokers/redis/publisher.py new file mode 100644 index 00000000..8a8b25c7 --- /dev/null +++ b/data_structure/python/materials-queue/message_brokers/redis/publisher.py @@ -0,0 +1,8 @@ +# publisher.py + +import redis + +with redis.Redis() as client: + while True: + message = input("Message: ") + client.publish("chatroom", message) diff --git a/data_structure/python/materials-queue/message_brokers/redis/subscriber.py b/data_structure/python/materials-queue/message_brokers/redis/subscriber.py new file mode 100644 index 00000000..7007c487 --- /dev/null +++ b/data_structure/python/materials-queue/message_brokers/redis/subscriber.py @@ -0,0 +1,11 @@ +# subscriber.py + +import redis + +with redis.Redis() as client: + pubsub = client.pubsub() + pubsub.subscribe("chatroom") + for message in pubsub.listen(): + if message["type"] == "message": + body = message["data"].decode("utf-8") + print(f"Got message: {body}") diff --git a/data_structure/python/materials-queue/requirements.txt b/data_structure/python/materials-queue/requirements.txt new file mode 100644 index 00000000..b5b425f6 --- /dev/null +++ b/data_structure/python/materials-queue/requirements.txt @@ -0,0 +1,9 @@ +aiohttp +beautifulsoup4 +kafka-python3 +networkx +pika +pydot +pygraphviz +redis +rich diff --git 
a/data_structure/python/materials-queue/src/async_queues.py b/data_structure/python/materials-queue/src/async_queues.py new file mode 100644 index 00000000..8eb28d00 --- /dev/null +++ b/data_structure/python/materials-queue/src/async_queues.py @@ -0,0 +1,101 @@ +# async_queues.py + +import argparse +import asyncio +import sys +from collections import Counter +from typing import NamedTuple +from urllib.parse import urljoin + +import aiohttp +from bs4 import BeautifulSoup + + +class Job(NamedTuple): + url: str + depth: int = 1 + + def __lt__(self, other): + if isinstance(other, Job): + return len(self.url) < len(other.url) + + +async def main(args): + session = aiohttp.ClientSession() + try: + links = Counter() + queue = asyncio.Queue() + # queue = asyncio.LifoQueue() + # queue = asyncio.PriorityQueue() + tasks = [ + asyncio.create_task( + worker( + f"Worker-{i + 1}", + session, + queue, + links, + args.max_depth, + ) + ) + for i in range(args.num_workers) + ] + + await queue.put(Job(args.url)) + await queue.join() + + for task in tasks: + task.cancel() + + await asyncio.gather(*tasks, return_exceptions=True) + + display(links) + finally: + await session.close() + + +async def worker(worker_id, session, queue, links, max_depth): + print(f"[{worker_id} starting]", file=sys.stderr) + while True: + url, depth = await queue.get() + links[url] += 1 + try: + if depth <= max_depth: + print(f"[{worker_id} {depth=} {url=}]", file=sys.stderr) + if html := await fetch_html(session, url): + for link_url in parse_links(url, html): + await queue.put(Job(link_url, depth + 1)) + except aiohttp.ClientError: + print(f"[{worker_id} failed at {url=}]", file=sys.stderr) + finally: + queue.task_done() + + +async def fetch_html(session, url): + async with session.get(url) as response: + if response.ok and response.content_type == "text/html": + return await response.text() + + +def parse_links(url, html): + soup = BeautifulSoup(html, features="html.parser") + for anchor in 
soup.select("a[href]"): + href = anchor.get("href").lower() + if not href.startswith("javascript:"): + yield urljoin(url, href) + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("url") + parser.add_argument("-d", "--max-depth", type=int, default=2) + parser.add_argument("-w", "--num-workers", type=int, default=3) + return parser.parse_args() + + +def display(links): + for url, count in links.most_common(): + print(f"{count:>3} {url}") + + +if __name__ == "__main__": + asyncio.run(main(parse_args())) diff --git a/data_structure/python/materials-queue/src/graph.py b/data_structure/python/materials-queue/src/graph.py new file mode 100644 index 00000000..ee87f7f7 --- /dev/null +++ b/data_structure/python/materials-queue/src/graph.py @@ -0,0 +1,155 @@ +# graph.py + +from collections import deque +from math import inf as infinity +from typing import NamedTuple + +import networkx as nx + +from queues import MutableMinHeap, Queue, Stack + + +class City(NamedTuple): + name: str + country: str + year: int | None + latitude: float + longitude: float + + @classmethod + def from_dict(cls, attrs): + return cls( + name=attrs["xlabel"], + country=attrs["country"], + year=int(attrs["year"]) or None, + latitude=float(attrs["latitude"]), + longitude=float(attrs["longitude"]), + ) + + +def load_graph(filename, node_factory): + graph = nx.nx_agraph.read_dot(filename) + nodes = { + name: node_factory(attributes) + for name, attributes in graph.nodes(data=True) + } + return nodes, nx.Graph( + (nodes[name1], nodes[name2], weights) + for name1, name2, weights in graph.edges(data=True) + ) + + +def breadth_first_traverse(graph, source, order_by=None): + queue = Queue(source) + visited = {source} + while queue: + yield (node := queue.dequeue()) + neighbors = list(graph.neighbors(node)) + if order_by: + neighbors.sort(key=order_by) + for neighbor in neighbors: + if neighbor not in visited: + visited.add(neighbor) + queue.enqueue(neighbor) + + +def 
breadth_first_search(graph, source, predicate, order_by=None): + return search(breadth_first_traverse, graph, source, predicate, order_by) + + +def shortest_path(graph, source, destination, order_by=None): + queue = Queue(source) + visited = {source} + previous = {} + while queue: + node = queue.dequeue() + neighbors = list(graph.neighbors(node)) + if order_by: + neighbors.sort(key=order_by) + for neighbor in neighbors: + if neighbor not in visited: + visited.add(neighbor) + queue.enqueue(neighbor) + previous[neighbor] = node + if neighbor == destination: + return retrace(previous, source, destination) + + +def retrace(previous, source, destination): + path = deque() + + current = destination + while current != source: + path.appendleft(current) + current = previous.get(current) + if current is None: + return None + + path.appendleft(source) + return list(path) + + +def connected(graph, source, destination): + return shortest_path(graph, source, destination) is not None + + +def depth_first_traverse(graph, source, order_by=None): + stack = Stack(source) + visited = set() + while stack: + if (node := stack.dequeue()) not in visited: + yield node + visited.add(node) + neighbors = list(graph.neighbors(node)) + if order_by: + neighbors.sort(key=order_by) + for neighbor in reversed(neighbors): + stack.enqueue(neighbor) + + +def recursive_depth_first_traverse(graph, source, order_by=None): + visited = set() + + def visit(node): + yield node + visited.add(node) + neighbors = list(graph.neighbors(node)) + if order_by: + neighbors.sort(key=order_by) + for neighbor in neighbors: + if neighbor not in visited: + yield from visit(neighbor) + + return visit(source) + + +def depth_first_search(graph, source, predicate, order_by=None): + return search(depth_first_traverse, graph, source, predicate, order_by) + + +def search(traverse, graph, source, predicate, order_by=None): + for node in traverse(graph, source, order_by): + if predicate(node): + return node + + +def 
dijkstra_shortest_path(graph, source, destination, weight_factory): + previous = {} + visited = set() + + unvisited = MutableMinHeap() + for node in graph.nodes: + unvisited[node] = infinity + unvisited[source] = 0 + + while unvisited: + visited.add(node := unvisited.dequeue()) + for neighbor, weights in graph[node].items(): + if neighbor not in visited: + weight = weight_factory(weights) + new_distance = unvisited[node] + weight + if new_distance < unvisited[neighbor]: + unvisited[neighbor] = new_distance + previous[neighbor] = node + + return retrace(previous, source, destination) diff --git a/data_structure/python/materials-queue/src/multiprocess_queue.py b/data_structure/python/materials-queue/src/multiprocess_queue.py new file mode 100644 index 00000000..19a02a8b --- /dev/null +++ b/data_structure/python/materials-queue/src/multiprocess_queue.py @@ -0,0 +1,123 @@ +# multiprocess_queue.py + +import argparse +import multiprocessing +import queue +import time +from dataclasses import dataclass +from hashlib import md5 +from string import ascii_lowercase + +POISON_PILL = None + + +class Combinations: + def __init__(self, alphabet, length): + self.alphabet = alphabet + self.length = length + + def __len__(self): + return len(self.alphabet) ** self.length + + def __getitem__(self, index): + if index >= len(self): + raise IndexError + return "".join( + self.alphabet[ + (index // len(self.alphabet) ** i) % len(self.alphabet) + ] + for i in reversed(range(self.length)) + ) + + +@dataclass(frozen=True) +class Job: + combinations: Combinations + start_index: int + stop_index: int + + def __call__(self, hash_value): + for index in range(self.start_index, self.stop_index): + text_bytes = self.combinations[index].encode("utf-8") + hashed = md5(text_bytes).hexdigest() + if hashed == hash_value: + return text_bytes.decode("utf-8") + + +class Worker(multiprocessing.Process): + def __init__(self, queue_in, queue_out, hash_value): + super().__init__(daemon=True) + self.queue_in 
= queue_in + self.queue_out = queue_out + self.hash_value = hash_value + + def run(self): + while True: + job = self.queue_in.get() + if job is POISON_PILL: + self.queue_in.put(POISON_PILL) + break + if plaintext := job(self.hash_value): + self.queue_out.put(plaintext) + break + + +def main(args): + t1 = time.perf_counter() + + queue_in = multiprocessing.Queue() + queue_out = multiprocessing.Queue() + + workers = [ + Worker(queue_in, queue_out, args.hash_value) + for _ in range(args.num_workers) + ] + + for worker in workers: + worker.start() + + for text_length in range(1, args.max_length + 1): + combinations = Combinations(ascii_lowercase, text_length) + for indices in chunk_indices(len(combinations), len(workers)): + queue_in.put(Job(combinations, *indices)) + + queue_in.put(POISON_PILL) + + while any(worker.is_alive() for worker in workers): + try: + solution = queue_out.get(timeout=0.1) + if solution: + t2 = time.perf_counter() + print(f"{solution} (found in {t2 - t1:.1f}s)") + break + except queue.Empty: + pass + else: + print("Unable to find a solution") + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("hash_value") + parser.add_argument("-m", "--max-length", type=int, default=6) + parser.add_argument( + "-w", + "--num-workers", + type=int, + default=multiprocessing.cpu_count(), + ) + return parser.parse_args() + + +def chunk_indices(length, num_chunks): + start = 0 + while num_chunks > 0: + num_chunks = min(num_chunks, length) + chunk_size = round(length / num_chunks) + yield start, (start := start + chunk_size) + length -= chunk_size + num_chunks -= 1 + + +if __name__ == "__main__": + main(parse_args()) diff --git a/data_structure/python/materials-queue/src/queues.py b/data_structure/python/materials-queue/src/queues.py new file mode 100644 index 00000000..aa85d5ee --- /dev/null +++ b/data_structure/python/materials-queue/src/queues.py @@ -0,0 +1,75 @@ +# queues.py + +from collections import deque +from dataclasses import 
dataclass +from heapq import heapify, heappop, heappush +from itertools import count +from typing import Any + + +class IterableMixin: + def __len__(self): + return len(self._elements) + + def __iter__(self): + while len(self) > 0: + yield self.dequeue() + + +class Queue(IterableMixin): + def __init__(self, *elements): + self._elements = deque(elements) + + def enqueue(self, element): + self._elements.append(element) + + def dequeue(self): + return self._elements.popleft() + + +class Stack(Queue): + def dequeue(self): + return self._elements.pop() + + +class PriorityQueue(IterableMixin): + def __init__(self): + self._elements = [] + self._counter = count() + + def enqueue_with_priority(self, priority, value): + element = (-priority, next(self._counter), value) + heappush(self._elements, element) + + def dequeue(self): + return heappop(self._elements)[-1] + + +@dataclass(order=True) +class Element: + priority: float + count: int + value: Any + + +class MutableMinHeap(IterableMixin): + def __init__(self): + super().__init__() + self._elements_by_value = {} + self._elements = [] + self._counter = count() + + def __setitem__(self, unique_value, priority): + if unique_value in self._elements_by_value: + self._elements_by_value[unique_value].priority = priority + heapify(self._elements) + else: + element = Element(priority, next(self._counter), unique_value) + self._elements_by_value[unique_value] = element + heappush(self._elements, element) + + def __getitem__(self, unique_value): + return self._elements_by_value[unique_value].priority + + def dequeue(self): + return heappop(self._elements).value diff --git a/data_structure/python/materials-queue/src/roadmap.dot b/data_structure/python/materials-queue/src/roadmap.dot new file mode 100644 index 00000000..f7458e85 --- /dev/null +++ b/data_structure/python/materials-queue/src/roadmap.dot @@ -0,0 +1,230 @@ +graph "Cities in the United Kingdom" { + + graph [layout=fdp, bgcolor=white] + node [shape=point, color=gray15, 
height=0.75, fontsize=100,] + edge [color=gray15, penwidth=2.5, fontsize=60] + + subgraph northern_ireland { + node [country="Northern Ireland"] + armagh [year=1994, latitude=54.3499, longitude=-6.6546, xlabel="Armagh" pos="7,74!"] + belfast [year=1888, latitude=54.596389, longitude=-5.93, xlabel="Belfast" pos="15,79!"] + derry [year=1604, latitude=54.9975, longitude=-7.32, xlabel="Derry" pos="0,86!"] + lisburn [year=2002, latitude=54.512, longitude=-6.031, xlabel="Lisburn" pos="14,77!"] + newry [year=2002, latitude=54.176, longitude=-6.349, xlabel="Newry" pos="10,71!"] + } + + subgraph wales { + node [country="Wales"] + bangor [year=0, latitude=53.228, longitude=-4.128, xlabel="Bangor" pos="35,53!"] + cardiff [year=1905, latitude=51.481667, longitude=-3.179167, xlabel="Cardiff" pos="46,21!"] + newport [year=2002, latitude=51.583333, longitude=-3, xlabel="Newport" pos="48,23!"] + st_asaph [year=2012, latitude=53.258, longitude=-3.442, xlabel="St Asaph" pos="43,53!"] + st_davids [year=1994, latitude=51.882, longitude=-5.269, xlabel="St Davids" pos="22,28!"] + swansea [year=1969, latitude=51.616667, longitude=-3.95, xlabel="Swansea" pos="37,23!"] + } + + subgraph scotland { + node [country="Scotland"] + aberdeen [year=1891, latitude=57.15, longitude=-2.11, xlabel="Aberdeen" pos="57,129!"] + dundee [year=1889, latitude=56.462, longitude=-2.9707, xlabel="Dundee" pos="48,115!"] + edinburgh [year=1329, latitude=55.953333, longitude=-3.189167, xlabel="Edinburgh" pos="45,105!"] + glasgow [year=1492, latitude=55.861111, longitude=-4.25, xlabel="Glasgow" pos="34,103!"] + inverness [year=2001, latitude=57.4778, longitude=-4.2247, xlabel="Inverness" pos="34,136!"] + perth [year=2012, latitude=56.395833, longitude=-3.433333, xlabel="Perth" pos="43,114!"] + stirling [year=2002, latitude=56.1166, longitude=-3.9369, xlabel="Stirling" pos="37,108!"] + } + + subgraph england { + node [country="England"] + bath [year=1090, latitude=51.38, longitude=-2.36, xlabel="Bath" pos="55,19!"] 
+ birmingham [year=1889, latitude=52.48, longitude=-1.9025, xlabel="Birmingham" pos="60,39!"] + bradford [year=1897, latitude=53.792, longitude=-1.754, xlabel="Bradford" pos="61,63!"] + brighton [year=2001, latitude=50.827778, longitude=-0.152778, xlabel="Brighton & Hove" pos="79,9!"] + bristol [year=1542, latitude=51.45, longitude=-2.583333, xlabel="Bristol" pos="52,20!"] + cambridge [year=1951, latitude=52.205278, longitude=0.119167, xlabel="Cambridge" pos="82,34!"] + canterbury [year=0, latitude=51.28, longitude=1.08, xlabel="Canterbury" pos="93,17!"] + carlisle [year=1133, latitude=54.890833, longitude=-2.943889, xlabel="Carlisle" pos="48,84!"] + chelmsford [year=2012, latitude=51.735278, longitude=0.479167, xlabel="Chelmsford" pos="86,26!"] + chester [year=1541, latitude=53.19, longitude=-2.89, xlabel="Chester" pos="49,52!"] + chichester [year=1075, latitude=50.8365, longitude=-0.7792, xlabel="Chichester" pos="72,10!"] + coventry [year=1102, latitude=52.408056, longitude=-1.510556, xlabel="Coventry" pos="64,38!"] + derby [year=1977, latitude=52.916667, longitude=-1.483333, xlabel="Derby" pos="64,47!"] + durham [year=995, latitude=54.7761, longitude=-1.5733, xlabel="Durham" pos="63,82!"] + ely [year=1109, latitude=52.398056, longitude=0.262222, xlabel="Ely" pos="84,37!"] + exeter [year=0, latitude=50.725556, longitude=-3.526944, xlabel="Exeter" pos="42,8!"] + gloucester [year=1541, latitude=51.864444, longitude=-2.244444, xlabel="Gloucester" pos="56,28!"] + hereford [year=0, latitude=52.056, longitude=-2.716, xlabel="Hereford" pos="51,31!"] + kingston [year=1897, latitude=53.744444, longitude=-0.3325, xlabel="Kingston upon Hull" pos="77,62!"] + lancaster [year=1937, latitude=54.047, longitude=-2.801, xlabel="Lancaster" pos="50,68!"] + leeds [year=1893, latitude=53.800268, longitude=-1.549721, xlabel="Leeds" pos="64,63!"] + leicester [year=1919, latitude=52.634444, longitude=-1.131944, xlabel="Leicester" pos="68,42!"] + lichfield [year=0, latitude=52.682, 
longitude=-1.829, xlabel="Lichfield" pos="61,43!"] + lincoln [year=1072, latitude=53.228333, longitude=-0.538889, xlabel="Lincoln" pos="75,53!"] + liverpool [year=1880, latitude=53.4075, longitude=-2.991944, xlabel="Liverpool" pos="48,56!"] + london [year=0, latitude=51.507222, longitude=-0.1275, xlabel="City of London" pos="80,21!"] + manchester [year=1853, latitude=53.479444, longitude=-2.245278, xlabel="Manchester" pos="56,57!"] + newcastle [year=1882, latitude=54.98, longitude=-1.61, xlabel="Newcastle upon Tyne" pos="63,86!"] + norwich [year=1094, latitude=52.63, longitude=1.297, xlabel="Norwich" pos="95,42!"] + nottingham [year=1897, latitude=52.953333, longitude=-1.15, xlabel="Nottingham" pos="68,48!"] + oxford [year=1542, latitude=51.751944, longitude=-1.257778, xlabel="Oxford" pos="67,26!"] + peterborough [year=1541, latitude=52.5725, longitude=-0.243056, xlabel="Peterborough" pos="78,41!"] + plymouth [year=1928, latitude=50.371389, longitude=-4.142222, xlabel="Plymouth" pos="35,1!"] + portsmouth [year=1926, latitude=50.805833, longitude=-1.087222, xlabel="Portsmouth" pos="69,9!"] + preston [year=2002, latitude=53.759, longitude=-2.699, xlabel="Preston" pos="51,63!"] + ripon [year=1865, latitude=54.138, longitude=-1.524, xlabel="Ripon" pos="64,70!"] + salford [year=1926, latitude=53.509722, longitude=-2.334444, xlabel="Salford" pos="55,58!"] + salisbury [year=1227, latitude=51.07, longitude=-1.79, xlabel="Salisbury" pos="61,14!"] + sheffield [year=1893, latitude=53.380368, longitude=-1.469701, xlabel="Sheffield" pos="65,56!"] + southampton [year=1964, latitude=50.9025, longitude=-1.404167, xlabel="Southampton" pos="65,11!"] + southend_on_sea [year=2022, latitude=51.55, longitude=0.71, xlabel="Southend-on-Sea" pos="89,22!"] + st_albans [year=1877, latitude=51.783333, longitude=-0.333333, xlabel="St Albans" pos="77,26!"] + stoke_on_trent [year=1925, latitude=53, longitude=-2.183333, xlabel="Stoke-on-Trent" pos="57,49!"] + sunderland [year=1992, 
latitude=54.91, longitude=-1.385, xlabel="Sunderland" pos="66,85!"] + truro [year=1877, latitude=50.26, longitude=-5.051, xlabel="Truro" pos="25,0!"] + wakefield [year=1888, latitude=53.683, longitude=-1.499, xlabel="Wakefield" pos="64,61!"] + wells [year=0, latitude=51.209444, longitude=-2.645, xlabel="Wells" pos="52,16!"] + westminster [year=1540, latitude=51.512222, longitude=-0.163333, xlabel="Westminster" pos="79,22!"] + winchester [year=0, latitude=51.062, longitude=-1.317, xlabel="Winchester" pos="66,14!"] + wolverhampton [year=2001, latitude=52.583333, longitude=-2.133333, xlabel="Wolverhampton" pos="57,41!"] + worcester [year=0, latitude=52.192, longitude=-2.22, xlabel="Worcester" pos="56,34!"] + york [year=0, latitude=53.958333, longitude=-1.080278, xlabel="York" pos="69,66!"] + } + + armagh -- derry [distance=61, label=61] + armagh -- newry [distance=19, label=19] + bangor -- cardiff [distance=185, label=185] + bangor -- st_davids [distance=157, label=157] + bath -- wells [distance=20, label=20] + belfast -- derry [distance=71, label=71] + brighton -- canterbury [distance=91, label=91] + brighton -- chichester [distance=33, label=33] + bristol -- bath [distance=13, label=13] + bristol -- exeter [distance=78, label=78] + bristol -- gloucester [distance=39, label=39] + bristol -- newport [distance=31, label=31] + bristol -- wells [distance=21, label=21] + carlisle -- glasgow [distance=96, label=96] + carlisle -- lancaster [distance=68, label=68] + chester -- bangor [distance=60, label=60] + chester -- hereford [distance=94, label=94] + chester -- liverpool [distance=21, label=21] + chester -- wolverhampton [distance=62, label=62] + coventry -- birmingham [distance=24, label=24] + coventry -- st_albans [distance=74, label=74] + dundee -- aberdeen [distance=66, label=66] + durham -- sunderland [distance=14, label=14] + edinburgh -- carlisle [distance=100, label=100] + edinburgh -- dundee [distance=57, label=57] + edinburgh -- glasgow [distance=47, label=47] 
+ edinburgh -- newcastle [distance=119, label=119] + edinburgh -- perth [distance=44, label=44] + edinburgh -- stirling [distance=37, label=37] + ely -- cambridge [distance=17, label=17] + ely -- norwich [distance=57, label=57] + exeter -- plymouth [distance=52, label=52] + exeter -- salisbury [distance=92, label=92] + exeter -- southampton [distance=109, label=109] + exeter -- truro [distance=89, label=89] + exeter -- wells [distance=60, label=60] + inverness -- aberdeen [distance=103, label=103] + leeds -- bradford [distance=12, label=12] + leeds -- kingston [distance=65, label=65] + leeds -- ripon [distance=33, label=33] + leeds -- york [distance=29, label=29] + leicester -- derby [distance=32, label=32] + leicester -- lichfield [distance=42, label=42] + leicester -- st_albans [distance=80, label=80] + lichfield -- derby [distance=25, label=25] + lichfield -- wolverhampton [distance=20, label=20] + lisburn -- armagh [distance=32, label=32] + lisburn -- belfast [distance=9, label=9] + lisburn -- newry [distance=31, label=31] + liverpool -- preston [distance=36, label=36] + london -- bath [distance=115, label=115] + london -- brighton [distance=53, label=53] + london -- bristol [distance=118, label=118] + london -- cambridge [distance=61, label=61] + london -- canterbury [distance=62, label=62] + london -- chelmsford [distance=40, label=40] + london -- coventry [distance=100, label=100] + london -- oxford [distance=58, label=58] + london -- peterborough [distance=85, label=85] + london -- portsmouth [distance=75, label=75] + london -- southampton [distance=79, label=79] + london -- southend_on_sea [distance=42, label=42] + london -- st_albans [distance=25, label=25] + london -- westminster [distance=1, label=1] + london -- winchester [distance=68, label=68] + manchester -- bradford [distance=39, label=39] + manchester -- leeds [distance=45, label=45] + manchester -- liverpool [distance=35, label=35] + manchester -- salford [distance=3, label=3] + manchester -- 
sheffield [distance=38, label=38] + manchester -- wakefield [distance=49, label=49] + newcastle -- carlisle [distance=66, label=66] + newcastle -- durham [distance=21, label=21] + newcastle -- sunderland [distance=14, label=14] + newport -- cardiff [distance=13, label=13] + newport -- gloucester [distance=53, label=53] + newport -- hereford [distance=45, label=45] + newport -- swansea [distance=50, label=50] + norwich -- chelmsford [distance=83, label=83] + nottingham -- derby [distance=16, label=16] + nottingham -- leicester [distance=30, label=30] + nottingham -- lincoln [distance=39, label=39] + nottingham -- sheffield [distance=45, label=45] + oxford -- bath [distance=82, label=82] + oxford -- bristol [distance=85, label=85] + oxford -- coventry [distance=59, label=59] + oxford -- gloucester [distance=48, label=48] + oxford -- winchester [distance=54, label=54] + oxford -- worcester [distance=61, label=61] + perth -- dundee [distance=22, label=22] + perth -- inverness [distance=112, label=112] + perth -- stirling [distance=37, label=37] + peterborough -- cambridge [distance=43, label=43] + peterborough -- ely [distance=30, label=30] + peterborough -- leicester [distance=41, label=41] + peterborough -- lincoln [distance=51, label=51] + peterborough -- nottingham [distance=58, label=58] + portsmouth -- chichester [distance=17, label=17] + preston -- bradford [distance=53, label=53] + preston -- lancaster [distance=21, label=21] + preston -- salford [distance=31, label=31] + ripon -- bradford [distance=30, label=30] + ripon -- durham [distance=54, label=54] + salisbury -- bath [distance=44, label=44] + salisbury -- wells [distance=46, label=46] + salisbury -- winchester [distance=25, label=25] + sheffield -- lincoln [distance=47, label=47] + southampton -- portsmouth [distance=19, label=19] + southampton -- salisbury [distance=24, label=24] + southampton -- winchester [distance=14, label=14] + st_asaph -- bangor [distance=34, label=34] + st_asaph -- chester 
[distance=30, label=30] + st_asaph -- hereford [distance=118, label=118] + st_asaph -- liverpool [distance=38, label=38] + st_asaph -- newport [distance=162, label=162] + st_asaph -- st_davids [distance=151, label=151] + st_asaph -- swansea [distance=152, label=152] + stirling -- glasgow [distance=29, label=29] + stoke_on_trent -- birmingham [distance=45, label=45] + stoke_on_trent -- chester [distance=42, label=42] + stoke_on_trent -- derby [distance=35, label=35] + stoke_on_trent -- lichfield [distance=31, label=31] + stoke_on_trent -- liverpool [distance=57, label=57] + stoke_on_trent -- manchester [distance=45, label=45] + stoke_on_trent -- wolverhampton [distance=34, label=34] + swansea -- bangor [distance=161, label=161] + swansea -- cardiff [distance=43, label=43] + swansea -- st_davids [distance=73, label=73] + truro -- plymouth [distance=55, label=55] + wakefield -- kingston [distance=58, label=58] + wakefield -- sheffield [distance=26, label=26] + worcester -- birmingham [distance=41, label=41] + worcester -- gloucester [distance=28, label=28] + worcester -- hereford [distance=27, label=27] + york -- durham [distance=71, label=71] + york -- ripon [distance=27, label=27] + york -- wakefield [distance=40, label=40] +} diff --git a/data_structure/python/materials-queue/src/thread_safe_queues.py b/data_structure/python/materials-queue/src/thread_safe_queues.py new file mode 100644 index 00000000..b2c3bb6c --- /dev/null +++ b/data_structure/python/materials-queue/src/thread_safe_queues.py @@ -0,0 +1,190 @@ +# thread_safe_queues.py + +import argparse +import threading +from dataclasses import dataclass, field +from enum import IntEnum +from itertools import zip_longest +from queue import LifoQueue, PriorityQueue, Queue +from random import choice, randint +from time import sleep + +from rich.align import Align +from rich.columns import Columns +from rich.console import Group +from rich.live import Live +from rich.panel import Panel + +QUEUE_TYPES = {"fifo": 
Queue, "lifo": LifoQueue, "heap": PriorityQueue} + +PRODUCTS = ( + ":balloon:", + ":cookie:", + ":crystal_ball:", + ":diving_mask:", + ":flashlight:", + ":gem:", + ":gift:", + ":kite:", + ":party_popper:", + ":postal_horn:", + ":ribbon:", + ":rocket:", + ":teddy_bear:", + ":thread:", + ":yo-yo:", +) + + +@dataclass(order=True) +class Product: + priority: int + label: str = field(compare=False) + + def __str__(self): + return self.label + + +class Priority(IntEnum): + HIGH = 1 + MEDIUM = 2 + LOW = 3 + + +PRIORITIZED_PRODUCTS = ( + Product(Priority.HIGH, ":1st_place_medal:"), + Product(Priority.MEDIUM, ":2nd_place_medal:"), + Product(Priority.LOW, ":3rd_place_medal:"), +) + + +class Worker(threading.Thread): + def __init__(self, speed, buffer): + super().__init__(daemon=True) + self.speed = speed + self.buffer = buffer + self.product = None + self.working = False + self.progress = 0 + + @property + def state(self): + if self.working: + return f"{self.product} ({self.progress}%)" + return ":zzz: Idle" + + def simulate_idle(self): + self.product = None + self.working = False + self.progress = 0 + sleep(randint(1, 3)) + + def simulate_work(self): + self.working = True + self.progress = 0 + delay = randint(1, 1 + 15 // self.speed) + for _ in range(100): + sleep(delay / 100) + self.progress += 1 + + +class Producer(Worker): + def __init__(self, speed, buffer, products): + super().__init__(speed, buffer) + self.products = products + + def run(self): + while True: + self.product = choice(self.products) + self.simulate_work() + self.buffer.put(self.product) + self.simulate_idle() + + +class Consumer(Worker): + def run(self): + while True: + self.product = self.buffer.get() + self.simulate_work() + self.buffer.task_done() + self.simulate_idle() + + +class View: + def __init__(self, buffer, producers, consumers): + self.buffer = buffer + self.producers = producers + self.consumers = consumers + + def animate(self): + with Live(self.render(), screen=True, refresh_per_second=10) 
as live: + while True: + live.update(self.render()) + + def render(self): + + match self.buffer: + case PriorityQueue(): + title = "Priority Queue" + products = map(str, reversed(list(self.buffer.queue))) + case LifoQueue(): + title = "Stack" + products = list(self.buffer.queue) + case Queue(): + title = "Queue" + products = reversed(list(self.buffer.queue)) + case _: + title = products = "" + + rows = [Panel(f"[bold]{title}:[/] {', '.join(products)}", width=82)] + pairs = zip_longest(self.producers, self.consumers) + for i, (producer, consumer) in enumerate(pairs, 1): + left_panel = self.panel(producer, f"Producer {i}") + right_panel = self.panel(consumer, f"Consumer {i}") + rows.append(Columns([left_panel, right_panel], width=40)) + return Group(*rows) + + def panel(self, worker, title): + if worker is None: + return "" + padding = " " * int(29 / 100 * worker.progress) + align = Align(padding + worker.state, align="left", vertical="middle") + return Panel(align, height=5, title=title) + + +def main(args): + buffer = QUEUE_TYPES[args.queue]() + products = PRIORITIZED_PRODUCTS if args.queue == "heap" else PRODUCTS + producers = [ + Producer(args.producer_speed, buffer, products) + for _ in range(args.producers) + ] + consumers = [ + Consumer(args.consumer_speed, buffer) for _ in range(args.consumers) + ] + + for producer in producers: + producer.start() + + for consumer in consumers: + consumer.start() + + view = View(buffer, producers, consumers) + view.animate() + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("-q", "--queue", choices=QUEUE_TYPES, default="fifo") + parser.add_argument("-p", "--producers", type=int, default=3) + parser.add_argument("-c", "--consumers", type=int, default=2) + parser.add_argument("-ps", "--producer-speed", type=int, default=1) + parser.add_argument("-cs", "--consumer-speed", type=int, default=1) + return parser.parse_args() + + +if __name__ == "__main__": + try: + main(parse_args()) + except 
KeyboardInterrupt: + pass diff --git a/data_structure/python/pq_1.py b/data_structure/python/pq_1.py new file mode 100644 index 00000000..77985bb9 --- /dev/null +++ b/data_structure/python/pq_1.py @@ -0,0 +1,32 @@ +#--------------------------------------------------------------- +# PRIORITY QUEUE (V1) +#--------------------------------------------------------------- + +# https://pieriantraining.com/python-tutorial-creating-a-priority-queue-in-python-2/ + +# V1 : via heapq +import heapq + +queue = [] + +def enqueue(item, priority): + heapq.heappush(queue, (priority, item)) + +def dequeue(): + if not queue: + return None + return heapq.heappop(queue)[1] + +# V2 : via PriorityQueue +import queue + +queue = queue.PriorityQueue() + +def enqueue(item, priority): + queue.put((priority, item)) + +def dequeue(): + if not queue: + return None + return queue.get()[1] + \ No newline at end of file diff --git a/data_structure/python/pq_2.py b/data_structure/python/pq_2.py new file mode 100644 index 00000000..c3519bdd --- /dev/null +++ b/data_structure/python/pq_2.py @@ -0,0 +1,81 @@ +#--------------------------------------------------------------- +# PRIORITY QUEUE (V2) +#--------------------------------------------------------------- + +# https://realpython.com/queue-in-python/ + +# queues.py + +from collections import deque +from dataclasses import dataclass +from heapq import heapify, heappop, heappush +from itertools import count +from typing import Any + + +class IterableMixin: + def __len__(self): + return len(self._elements) + + def __iter__(self): + while len(self) > 0: + yield self.dequeue() + + +class Queue(IterableMixin): + def __init__(self, *elements): + self._elements = deque(elements) + + def enqueue(self, element): + self._elements.append(element) + + def dequeue(self): + return self._elements.popleft() + + +class Stack(Queue): + def dequeue(self): + return self._elements.pop() + + +class PriorityQueue(IterableMixin): + def __init__(self): + self._elements = [] 
+ self._counter = count() + + def enqueue_with_priority(self, priority, value): + element = (-priority, next(self._counter), value) + heappush(self._elements, element) + + def dequeue(self): + return heappop(self._elements)[-1] + + +@dataclass(order=True) +class Element: + priority: float + count: int + value: Any + + +class MutableMinHeap(IterableMixin): + def __init__(self): + super().__init__() + self._elements_by_value = {} + self._elements = [] + self._counter = count() + + def __setitem__(self, unique_value, priority): + if unique_value in self._elements_by_value: + self._elements_by_value[unique_value].priority = priority + heapify(self._elements) + else: + element = Element(priority, next(self._counter), unique_value) + self._elements_by_value[unique_value] = element + heappush(self._elements, element) + + def __getitem__(self, unique_value): + return self._elements_by_value[unique_value].priority + + def dequeue(self): + return heappop(self._elements).value \ No newline at end of file diff --git a/data_structure/python/pq_3.py b/data_structure/python/pq_3.py new file mode 100644 index 00000000..47d46c5f --- /dev/null +++ b/data_structure/python/pq_3.py @@ -0,0 +1,44 @@ +#--------------------------------------------------------------- +# PRIORITY QUEUE (V3) +#--------------------------------------------------------------- + +# https://www.geeksforgeeks.org/priority-queue-in-python/ + +class PriorityQueue(object): + def __init__(self): + self.queue = [] + + def __str__(self): + return ' '.join([str(i) for i in self.queue]) + + # for checking if the queue is empty + def isEmpty(self): + return len(self.queue) == 0 + + # for inserting an element in the queue + def insert(self, data): + self.queue.append(data) + + # for popping an element based on Priority + def delete(self): + try: + max_val = 0 + for i in range(len(self.queue)): + if self.queue[i] > self.queue[max_val]: + max_val = i + item = self.queue[max_val] + del self.queue[max_val] + return item + 
except IndexError: + print() + exit() + +if __name__ == '__main__': + myQueue = PriorityQueue() + myQueue.insert(12) + myQueue.insert(1) + myQueue.insert(14) + myQueue.insert(7) + print(myQueue) + while not myQueue.isEmpty(): + print(myQueue.delete()) \ No newline at end of file diff --git a/doc/cheatsheet/priority_queue.md b/doc/cheatsheet/priority_queue.md index e72ed2cc..8e11897a 100644 --- a/doc/cheatsheet/priority_queue.md +++ b/doc/cheatsheet/priority_queue.md @@ -2,12 +2,21 @@ ## 0) Concept -- Priority queue is one of the implementations of heap +- Priority queue is one of the implementations of heap +- Not follow "FIFO", but pop elelement with priority +- https://realpython.com/queue-in-python/ +- https://docs.python.org/zh-tw/3/library/heapq.html#priority-queue-implementation-notes ### 0-1) Types ### 0-2) Pattern +### 0-3) Use case + +- PQ task management +- BFS with PQ +- Dijkstra’s Algorithm with PQ + ## 1) General form ### 1-1) Basic OP