How to upload a watcher to Elasticsearch/Kibana in Docker?

I am uploading a custom watcher to Elasticsearch through Kibana. While using Dev Tools, the request is acknowledged and everything works fine:

PUT _watcher/watch/new_test_watcher
{
    "trigger": {
        "schedule": {
            "interval": "10m"
        }
    },
    "input": {
        "search": {
            "request": {
                "search_type": "query_then_fetch",
                "indices": [
                    ".ds-flow-netflow*"
                ],
                "rest_total_hits_as_int": true,
                "body": {
                    "size": 0,
                    "query": {
                        "bool": {
                            "must": [
                                {
                                    "range": {
                                        "@timestamp": {
                                            "gte": "now-7d/d",
                                            "lt": "now/d"
                                        }
                                    }
                                },
                                {
                                    "match": {
                                        "diff.reportType": 1
                                    }
                                },
                                {
                                    "match": {
                                        "flow1": "<CHANGE_ME>"
                                    }
                                },
                                {
                                    "match": {
                                        "dP": "<CHANGE_ME>"
                                    }
                                }
                            ]
                        }
                    },
                    "aggs": {
                        "historical_avg": {
                            "avg": {
                                "field": "diff.count"
                            }
                        },
                        "current_intervals": {
                            "date_histogram": {
                                "field": "@timestamp",
                                "fixed_interval": "5m"
                            },
                            "aggs": {
                                "max_value": {
                                    "max": {
                                        "field": "diff.count"
                                    }
                                }
                            }
                        },
                        "most_frequent_flows": {
                            "terms": {
                                "field": "flow1.keyword"
                            }
                        },
                        "most_frequent_dP": {
                            "terms": {
                                "field": "dP"
                            }
                        }
                    }
                }
            }
        }
    },
    "condition": {
        "script": {
            "source": """// Get params.
                def percentage = params.percentage_increase;

                // Set initial variables.
                def percentage_increase = 0;
                def internal_max = 0;
                def current_max = 0;
                def flow = "";
                def dP = "";

                // Set variables for comparison.
                def average = ctx.payload.aggregations['historical_avg'].value;
                if (average!=null) {
                        percentage_increase = average + (percentage * average)/100;
                }
                // Push all matching occurrences of the alert into a list.
                def buckets = ctx.payload.aggregations['current_intervals'].buckets;
                List new_list = new ArrayList();
                for (item in buckets) {
                    current_max = item['max_value'].value;
                    if (current_max!=null) {
                        if (percentage_increase <= current_max) {
                            new_list.add(item);
                        }
                    }
                }
                // Comment out the next block if you want all occurrences instead of just the last one. Useful during debugging.
                // START - Pick Last
                if (new_list.size()>0) {
                    List temp_list = new ArrayList();
                    temp_list.add(new_list[-1]);
                    new_list = temp_list;
                }
                // END - Pick Last
                // Format in a message.
                def message = "";
                for (key in new_list) {
                    message += "\\n";
                    if (key.key_as_string!=null) {
                        message += "\\t- " + "ts: " + key.key_as_string + "," + '\\n';
                    }
                    if (key.max_value!=null) {
                        message += "\\t- " + "max_value" + ":" + key.max_value.value + "," + '\\n';
                    }
                    message += "";
                }
                message += "";

                if (ctx.payload.aggregations['most_frequent_flows'].buckets.size()>0) {
                    flow = ctx.payload.aggregations['most_frequent_flows'].buckets[0].key;
                }
                if (ctx.payload.aggregations['most_frequent_dP'].buckets.size()>0) {
                    dP = ctx.payload.aggregations['most_frequent_dP'].buckets[0].key;
                }
                // Assign in payload for logging.
                ctx.vars.params = [message, percentage, average, dP, flow];
                // Uncomment the next line for debug mode.
                //Debug.explain(ctx);

                if (message!="") {
                    return true;
                } else {
                    return false;
                }
            """,
            "lang": "painless",
            "params": {
                "percentage_increase": 0
            }
        }
    },
    "actions": {
        "webhook-mailing-action": {
            "webhook": {
                "host": "A.B.C.D",
                "port": 8000,
                "method": "post",
                "scheme": "http",
                "body": """{
                    "message": "# Alert for drop max
                        **Alert for drop max was triggered.**
                        **Details:**
                        - Flow: {{ctx.vars.params.4}}
                        - dP: {{ctx.vars.params.3}}
                        - Percentage Threshold: {{ctx.vars.params.1}}%
                        - Historical drop (average): {{ctx.vars.params.2}} ns
                        Current interval max seen at the following intervals: {{ctx.vars.params.0}}",
                    "host": "A.B.C.D",
                    "port": 8000,
                    "method": "post",
                    "scheme": "http",
                    "kibana_email_connector": "SMTPForAlerts",
                    "to": "example@example.com",
                    "subject": "Alert for drop max was triggerred!"
                }"""
            }
        }
    }
}

But when I run the same request with curl inside the Docker container, it fails. The command is roughly the following (host and file name are placeholders for my setup; watcher.json holds exactly the body shown above):
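
curl -X PUT "http://localhost:9200/_watcher/watch/new_test_watcher" \
  -H "Content-Type: application/json" \
  --data-binary @watcher.json

It fails with the following: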

{"error":{"root_cause":[{"type":"x_content_parse_exception","reason":"[83:23] [script] failed to parse object"}],"type":"x_content_parse_exception","reason":"[83:23] [script] failed to parse object","caused_by":{"type":"json_parse_exception","reason":"Unexpected character ('\"' (code 34)): was expecting comma to separate Object entries\n at [Source: (org.elasticsearch.common.io.stream.ByteBufferStreamInput); line: 83, column: 26]"}},"status":400}

Now, I do not want to change my JSON; the script itself works perfectly in Dev Tools. How do I upload the JSON as-is, without Kibana/curl trying to parse it?