import sys
import json

from cassandra.cluster import Cluster

# Make helper modules under config/db importable (this affects module imports
# only, not the relative path used for settings.json below).
sys.path.append("config/db")

# Load connection settings from a JSON file in the current working directory.
with open("./settings.json") as f:
    config = json.load(f)

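# The code below assumes settings.json defines the three keys it reads; a
# minimal sketch of that file (the values shown are placeholders, not taken
# from the source):
#
# {
#     "cassandra_addresses": ["127.0.0.1"],
#     "cassandra_port": 9042,
#     "cassandra_keyspace": "my_keyspace"
# }
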
print(
    f"Attempting Cassandra connection @ {config['cassandra_addresses']}:{config['cassandra_port']}")

# Connect to the cluster and open a session bound to the configured keyspace.
cluster = Cluster(config['cassandra_addresses'],
                  port=config['cassandra_port'])
session = cluster.connect(config['cassandra_keyspace'])
print("Connection OK")

# Simple smoke-test query: fetch every row from the clusters table.
result = session.execute("SELECT * FROM clusters")
print(result.all())
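
# Rows can also be consumed one at a time instead of materialising the whole
# result set; the column name "cluster_name" below is only an illustrative
# assumption, not taken from the source schema:
#
# for row in session.execute("SELECT * FROM clusters"):
#     print(row.cluster_name)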


"""
Stand-alone PySpark example (kept commented out): counts occurrences of each
character in the sample string (spaces included) via map/reduceByKey.
Running it needs the two imports below and a reachable Spark master.

import pyspark
from operator import add

sc = pyspark.SparkContext('spark://osboxes:7077')

data = sc.parallelize(list("aaa bbb cc dd e f"))
counts = data \
    .map(lambda x: (x, 1)) \
    .reduceByKey(add) \
    .sortBy(lambda x: x[1], ascending=False) \
    .collect()

for (word, count) in counts:
    print("{}: {}".format(word, count))
"""