Given a JSON configuration file such as the following (note the C-style comments, which are not valid JSON and must be stripped before parsing):
{
"queue_args":{
"host" :"localhost",
"port" :"15672",
"virtual_host" :"/",
"channel_max" :"None", /* Int of AMQP channel_max value*/
"frame_max" :"None", /* Int of AMQP frame_max value*/
"heartbeat_interval" :"None", /* Int of AMQP heartbeat_interval*/
"ssl" :"None", /* Bool to enable ssl*/
"ssl_options" :"None", /* Dict of ssl options. See https://www.rabbitmq.com/ssl.html*/
"connection_attempts" :"1000", /* Int maximum number of retry attempts*/
"retry_delay" :"0.25", /* Float time to wait in seconds, before the next.*/
"socket_timeout" :"None", /* Int socket timeout (in seconds?) for high latency networks*/
"locale" :"None",
"backpressure_detection" :"None", /* Bool to toggle backpressure detection*/
"login" :"guest",
"password" :"guest",
"exchange" :"",
"exchange_type" :"fanout"
},
"daemon_args":{
"daemon" : "False", /* Bool to run as a daemon rather than as an immediate process*/
"pidfile" : "StreamMessage.pid", /* the daemon PID file (default: %default)*/
"working-dir": ".", /* the directory to run the daemon in*/
"uid" : "os.getuid()", /* the userid to run the daemon as (default: inherited from parent process)*/
"gid" : "os.getgid()", /* the groupid to run the daemon as (default: inherited from parent process)*/
"umask" : "0022", /* the umask for files created by the daemon (default: 0022)*/
"stdout" : "False", /* sends standard output to the file STDOUT if set*/
"stderr" : "False" /* sends standard error to the file STDERR if set*/
},
"spark_args":{
"connection": "main", /* The name of the connection configuration to host */
"connector": "connection", /* Override the connector implementation entry point */
"class": "MessageReceiver", /* The entry point for your application (e.g. org.apache.spark.examples.SparkPi)*/
"master": "spark://192.168.56.101", /* The master URL for the cluster. TODO: determine correct port (e.g. spark://23.195.26.187:7077)*/
"deploy-mode": "client", /* Whether to deploy your driver on the worker nodes (cluster) or locally as an external client (client)*/
"conf": "None", /* Arbitrary Spark configuration property in key=value format. For values that contain spaces wrap “key=value” in quotes (as shown).*/
"application-jar": "None", /* Path to jar including your application and all dependencies. Must be globally visible inside of your cluster, hdfs:// or file:// present on all nodes.*/
"application-arguments": "None" /* Arguments passed to the main method of your main class, if any*/
}
}
You will need to minify the file (to strip the comments), parse the result, and unit test it:
from jsmin import jsmin
import json

json_file = 'c:\\Temp\\config.json'
with open(json_file) as raw_data:
    # jsmin strips the /* ... */ comments, leaving valid JSON
    mini_data = jsmin(raw_data.read())
json_data = json.loads(mini_data)
print(json_data['queue_args']['exchange_type'])  # prints: fanout
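One caveat: every value in this config parses as a string, so "None", "False", and "0.25" come back as the Python strings 'None', 'False', and '0.25' rather than the types the comments describe. Here is a minimal normalization sketch, assuming you want real Python values before handing queue_args to pika (the coerce_value helper is mine, not part of jsmin or pika):

import ast

def coerce_value(value):
    # Best-effort conversion of strings like 'None', 'False', '1000'
    # or '0.25' into Python literals; anything that does not parse as
    # a literal (e.g. 'localhost', 'fanout') is returned unchanged.
    try:
        return ast.literal_eval(value)
    except (ValueError, SyntaxError):
        return value

queue_args = {k: coerce_value(v) for k, v in json_data['queue_args'].items()}
# queue_args['channel_max'] is now None and queue_args['retry_delay'] is 0.25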
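Finally, a minimal unit test sketch using the standard unittest module; the load_config helper simply wraps the minify-and-parse step above, and the path and expected values are assumptions taken from the example config:

import json
import unittest
from jsmin import jsmin

CONFIG_PATH = 'c:\\Temp\\config.json'

def load_config(path):
    # Minify (strip the /* ... */ comments), then parse as JSON.
    with open(path) as f:
        return json.loads(jsmin(f.read()))

class ConfigTest(unittest.TestCase):
    def setUp(self):
        self.config = load_config(CONFIG_PATH)

    def test_sections_present(self):
        # The three top-level sections the daemon expects.
        for section in ('queue_args', 'daemon_args', 'spark_args'):
            self.assertIn(section, self.config)

    def test_queue_args_values(self):
        queue = self.config['queue_args']
        self.assertEqual(queue['exchange_type'], 'fanout')
        self.assertEqual(queue['port'], '15672')  # stored as a string

if __name__ == '__main__':
    unittest.main()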