"""Minimal elasticsearch-py example: index a document, fetch it back,
refresh the index, and run a match_all search (Elasticsearch 7.x API)."""
from datetime import datetime, timezone

from elasticsearch import Elasticsearch

# Connect to a node on localhost:9200 (the client default).
es = Elasticsearch()

doc = {
    'author': 'kimchy',
    'text': 'Elasticsearch: cool. bonsai cool.',
    # Timezone-aware timestamp: naive datetimes are ambiguous once indexed.
    'timestamp': datetime.now(timezone.utc),
}
res = es.index(index="test-index", id=1, body=doc)
print(res['result'])

res = es.get(index="test-index", id=1)
print(res['_source'])

# Refresh so the freshly indexed document becomes visible to search.
es.indices.refresh(index="test-index")

res = es.search(index="test-index", body={"query": {"match_all": {}}})
# ES 7.x reports the hit count under hits.total.value.
print(f"Got {res['hits']['total']['value']} Hits:")
for hit in res['hits']['hits']:
    src = hit["_source"]
    print(f"{src['timestamp']} {src['author']}: {src['text']}")
22
1# Elasticsearch 7.x
2elasticsearch>=7.0.0,<8.0.0
3
4# Elasticsearch 6.x
5elasticsearch>=6.0.0,<7.0.0
6
7# Elasticsearch 5.x
8elasticsearch>=5.0.0,<6.0.0
9
10# Elasticsearch 2.x
11elasticsearch>=2.0.0,<3.0.0
12
# allow up to 25 connections to each node
# (maxsize bounds the per-node connection pool — presumably the urllib3
# pool size; confirm against the transport class in use)
es = Elasticsearch(["host1", "host2"], maxsize=25)
3
from elasticsearch import Elasticsearch

# Sniffing is disabled unless explicitly requested.
es = Elasticsearch()

# Inspect the cluster once at startup and load-balance across every
# node that is discovered.
es = Elasticsearch(["seed1", "seed2"], sniff_on_start=True)

# Sniff on startup, again whenever a connection fails, and also
# periodically (at most every 60 seconds).
es = Elasticsearch(
    ["seed1", "seed2"],
    sniff_on_start=True,
    sniff_on_connection_fail=True,
    sniffer_timeout=60,
)
15